diff --git a/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/METADATA b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..6939bac7dbcfd5f59ee78aa94b428a6e39cdacea --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/METADATA @@ -0,0 +1,78 @@ +Metadata-Version: 2.4 +Name: certifi +Version: 2025.11.12 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Requires-Python: >=3.7 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
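Editor's note on the Usage section above: the README stops at printing the bundle path. A minimal sketch of how that path is typically consumed, using only the standard-library ``ssl`` and ``urllib.request`` modules (the target URL is a placeholder, not anything this package ships)::

    import ssl
    import urllib.request

    import certifi

    # Build an SSL context that verifies peers against certifi's bundled CAs.
    context = ssl.create_default_context(cafile=certifi.where())

    # Any stdlib HTTPS client can then use that context explicitly.
    with urllib.request.urlopen("https://example.com", context=context) as response:
        print(response.status)

Libraries such as ``requests`` wire this up automatically, which is why the README points to upstream projects for alternate-trust configuration rather than supporting it here.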
diff --git a/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/RECORD b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..969cbb50ee2cb352f3b3d0d34578ddab4285488a --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2025.11.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2025.11.12.dist-info/METADATA,sha256=_JprGu_1lWSdHlruRBKcorXnrfvBDhvX_6KRr8HQbLc,2475 +certifi-2025.11.12.dist-info/RECORD,, +certifi-2025.11.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +certifi-2025.11.12.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2025.11.12.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=1BRSxNMnZW7CZ2oJtYWLoJgfHfcB9i273exwiPwfjJM,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-313.pyc,, +certifi/__pycache__/__main__.cpython-313.pyc,, +certifi/__pycache__/core.cpython-313.pyc,, +certifi/cacert.pem,sha256=oa1dZD4hxDtb7XTH4IkdzbWPavUcis4eTwINZUqlKhY,283932 +certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/WHEEL b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e7fa31b6f3f78deb1022c1f7927f07d4d16da822 --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/top_level.txt b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..963eac530b9bc28d704d1bc410299c68e3216d4d --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi-2025.11.12.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/venv/lib/python3.13/site-packages/certifi/__init__.py b/venv/lib/python3.13/site-packages/certifi/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f11f5ae4c5c878e3b844e2b2a2c15fb64e2f60c1 --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2025.11.12" diff --git a/venv/lib/python3.13/site-packages/certifi/__main__.py b/venv/lib/python3.13/site-packages/certifi/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..8945b5da857f4a7dec2b84f1225f012f6098418c --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/venv/lib/python3.13/site-packages/certifi/cacert.pem b/venv/lib/python3.13/site-packages/certifi/cacert.pem new file mode 100644 index 0000000000000000000000000000000000000000..ebcb66fe6a8a6cbeb219495743aac28183d90cb2 --- /dev/null +++ 
b/venv/lib/python3.13/site-packages/certifi/cacert.pem @@ -0,0 +1,4678 @@ + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i 
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd 
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC 
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# 
Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv 
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ 
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# 
Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG 
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- + +# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" +# Serial: 65916896770016886708751106294915943533 +# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 +# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 +# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a +-----BEGIN CERTIFICATE----- +MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf +e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C +cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O +BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO +PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw +hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 
3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Label: "TrustAsia TLS ECC Root CA" +# Serial: 310892014698942880364840003424242768478804666567 +# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c +# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 +# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 +-----BEGIN CERTIFICATE----- +MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw +WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw +NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE +ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB +c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ +AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp +guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw +DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 +L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR +OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia TLS RSA Root CA" +# Serial: 160405846464868906657516898462547310235378010780 +# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 +# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa +# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 +-----BEGIN CERTIFICATE----- +MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM +BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN +MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG +A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 +c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC +AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ +NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ +Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 +HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 +ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb +xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX +i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ +UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j +TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT +bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 +S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 +Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 +iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt +7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp +2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ +g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj +pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M +pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP +XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe +SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 +ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy +323imttUQ/hHWKNddBWcwauwxzQ= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
+RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Label: "SwissSign RSA TLS Root CA 2022 - 1" +# Serial: 388078645722908516278762308316089881486363258315 +# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 +# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce +# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 +-----BEGIN CERTIFICATE----- +MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE +AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx +MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT +d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg +MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX +vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 +LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX +5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE +EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt +/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x +0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 +KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM +0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd +OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta +clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK +wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 +DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL +BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 +10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz +Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ +iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc +gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM +ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
+LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp +zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td +Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 +rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO +gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Label: "OISTE Server Root ECC G1" +# Serial: 47819833811561661340092227008453318557 +# MD5 Fingerprint: 42:a7:d2:35:ae:02:92:db:19:76:08:de:2f:05:b4:d4 +# SHA1 Fingerprint: 3b:f6:8b:09:ae:2a:92:7b:ba:e3:8d:3f:11:95:d9:e6:44:0c:45:e2 +# SHA256 Fingerprint: ee:c9:97:c0:c3:0f:21:6f:7e:3b:8b:30:7d:2b:ae:42:41:2d:75:3f:c8:21:9d:af:d1:52:0b:25:72:85:0f:49 +-----BEGIN CERTIFICATE----- +MIICNTCCAbqgAwIBAgIQI/nD1jWvjyhLH/BU6n6XnTAKBggqhkjOPQQDAzBLMQsw +CQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UEAwwY +T0lTVEUgU2VydmVyIFJvb3QgRUNDIEcxMB4XDTIzMDUzMTE0NDIyOFoXDTQ4MDUy +NDE0NDIyN1owSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5kYXRp +b24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IEVDQyBHMTB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABBcv+hK8rBjzCvRE1nZCnrPoH7d5qVi2+GXROiFPqOujvqQy +cvO2Ackr/XeFblPdreqqLiWStukhEaivtUwL85Zgmjvn6hp4LrQ95SjeHIC6XG4N +2xml4z+cKrhAS93mT6NjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBQ3 +TYhlz/w9itWj8UnATgwQb0K0nDAdBgNVHQ4EFgQUN02IZc/8PYrVo/FJwE4MEG9C +tJwwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCpKjAd0MKfkFFR +QD6VVCHNFmb3U2wIFjnQEnx/Yxvf4zgAOdktUyBFCxxgZzFDJe0CMQCSia7pXGKD +YmH5LVerVrkR3SW+ak5KGoJr3M/TvEqzPNcum9v4KGm8ay3sMaE641c= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Label: " OISTE Server Root RSA G1" +# Serial: 113845518112613905024960613408179309848 +# MD5 Fingerprint: 23:a7:9e:d4:70:b8:b9:14:57:41:8a:7e:44:59:e2:68 +# SHA1 Fingerprint: f7:00:34:25:94:88:68:31:e4:34:87:3f:70:fe:86:b3:86:9f:f0:6e +# SHA256 Fingerprint: 9a:e3:62:32:a5:18:9f:fd:db:35:3d:fd:26:52:0c:01:53:95:d2:27:77:da:c5:9d:b5:7b:98:c0:89:a6:51:e6 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIQVaXZZ5Qoxu0M+ifdWwFNGDANBgkqhkiG9w0BAQwFADBL +MQswCQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UE +AwwYT0lTVEUgU2VydmVyIFJvb3QgUlNBIEcxMB4XDTIzMDUzMTE0MzcxNloXDTQ4 +MDUyNDE0MzcxNVowSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5k +YXRpb24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IFJTQSBHMTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAKqu9KuCz/vlNwvn1ZatkOhLKdxVYOPM +vLO8LZK55KN68YG0nnJyQ98/qwsmtO57Gmn7KNByXEptaZnwYx4M0rH/1ow00O7b +rEi56rAUjtgHqSSY3ekJvqgiG1k50SeH3BzN+Puz6+mTeO0Pzjd8JnduodgsIUzk +ik/HEzxux9UTl7Ko2yRpg1bTacuCErudG/L4NPKYKyqOBGf244ehHa1uzjZ0Dl4z +O8vbUZeUapU8zhhabkvG/AePLhq5SvdkNCncpo1Q4Y2LS+VIG24ugBA/5J8bZT8R +tOpXaZ+0AOuFJJkk9SGdl6r7NH8CaxWQrbueWhl/pIzY+m0o/DjH40ytas7ZTpOS +jswMZ78LS5bOZmdTaMsXEY5Z96ycG7mOaES3GK/m5Q9l3JUJsJMStR8+lKXHiHUh +sd4JJCpM4rzsTGdHwimIuQq6+cF0zowYJmXa92/GjHtoXAvuY8BeS/FOzJ8vD+Ho +mnqT8eDI278n5mUpezbgMxVz8p1rhAhoKzYHKyfMeNhqhw5HdPSqoBNdZH702xSu ++zrkL8Fl47l6QGzwBrd7KJvX4V84c5Ss2XCTLdyEr0YconosP4EmQufU2MVshGYR +i3drVByjtdgQ8K4p92cIiBdcuJd5z+orKu5YM+Vt6SmqZQENghPsJQtdLEByFSnT +kCz3GkPVavBpAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU +8snBDw1jALvsRQ5KH7WxszbNDo0wHQYDVR0OBBYEFPLJwQ8NYwC77EUOSh+1sbM2 +zQ6NMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQwFAAOCAgEANGd5sjrG5T33 +I3K5Ce+SrScfoE4KsvXaFwyihdJ+klH9FWXXXGtkFu6KRcoMQzZENdl//nk6HOjG +5D1rd9QhEOP28yBOqb6J8xycqd+8MDoX0TJD0KqKchxRKEzdNsjkLWd9kYccnbz8 
+qyiWXmFcuCIzGEgWUOrKL+mlSdx/PKQZvDatkuK59EvV6wit53j+F8Bdh3foZ3dP +AGav9LEDOr4SfEE15fSmG0eLy3n31r8Xbk5l8PjaV8GUgeV6Vg27Rn9vkf195hfk +gSe7BYhW3SCl95gtkRlpMV+bMPKZrXJAlszYd2abtNUOshD+FKrDgHGdPY3ofRRs +YWSGRqbXVMW215AWRqWFyp464+YTFrYVI8ypKVL9AMb2kI5Wj4kI3Zaq5tNqqYY1 +9tVFeEJKRvwDyF7YZvZFZSS0vod7VSCd9521Kvy5YhnLbDuv0204bKt7ph6N/Ome +/msVuduCmsuY33OhkKCgxeDoAaijFJzIwZqsFVAzje18KotzlUBDJvyBpCpfOZC3 +J8tRd/iWkx7P8nd9H0aTolkelUTFLXVksNb54Dxp6gS1HAviRkRNQzuXSXERvSS2 +wq1yVAb+axj5d9spLFKebXd7Yv0PTY6YMjAwcRLWJTXjn/hvnLXrahut6hDTlhZy +BiElxky8j3C7DOReIoMt0r7+hVu05L0= +-----END CERTIFICATE----- diff --git a/venv/lib/python3.13/site-packages/certifi/core.py b/venv/lib/python3.13/site-packages/certifi/core.py new file mode 100644 index 0000000000000000000000000000000000000000..1c9661cc7c2f6917c2506a30b2710002f81ab23a --- /dev/null +++ b/venv/lib/python3.13/site-packages/certifi/core.py @@ -0,0 +1,83 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +else: + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. 
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we will also store that at the global level as well.
+            _CACERT_CTX = get_path("certifi", "cacert.pem")
+            _CACERT_PATH = str(_CACERT_CTX.__enter__())
+            atexit.register(exit_cacert_ctx)
+
+        return _CACERT_PATH
+
+    def contents() -> str:
+        return read_text("certifi", "cacert.pem", encoding="ascii")
diff --git a/venv/lib/python3.13/site-packages/certifi/py.typed b/venv/lib/python3.13/site-packages/certifi/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/__init__.py b/venv/lib/python3.13/site-packages/charset_normalizer/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d3a37990145e94ad85406166dbaf52f4c311e5e
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/charset_normalizer/__init__.py
@@ -0,0 +1,48 @@
+"""
+Charset-Normalizer
+~~~~~~~~~~~~~~~~~~
+The Real First Universal Charset Detector.
+A library that helps you read text from an unknown charset encoding.
+Motivated by chardet, this package tries to resolve the issue by taking a new approach.
+All IANA character set names for which the Python core library provides codecs are supported.
+
+Basic usage:
+    >>> from charset_normalizer import from_bytes
+    >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
+    >>> best_guess = results.best()
+    >>> str(best_guess)
+    'Bсеки човек има право на образование. Oбразованието!'
+
+Other methods and usages are available - see the full documentation
+at .
+:copyright: (c) 2021 by Ahmed TAHRI
+:license: MIT, see LICENSE for more details.
+""" + +from __future__ import annotations + +import logging + +from .api import from_bytes, from_fp, from_path, is_binary +from .legacy import detect +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "is_binary", + "detect", + "CharsetMatch", + "CharsetMatches", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/__main__.py b/venv/lib/python3.13/site-packages/charset_normalizer/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..e0e76f7bfbb411d4424d3a1834b0ea803d80ea7e --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/__main__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +from .cli import cli_detect + +if __name__ == "__main__": + cli_detect() diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/api.py b/venv/lib/python3.13/site-packages/charset_normalizer/api.py new file mode 100644 index 0000000000000000000000000000000000000000..ebd96390507bac07ae21d513b45777011a539892 --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/api.py @@ -0,0 +1,669 @@ +from __future__ import annotations + +import logging +from os import PathLike +from typing import BinaryIO + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: bytes | bytearray, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. + If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. 
+    By default the library does not set up any handler other than the NullHandler; if you set the 'explain'
+    toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
+    A custom logging format and handler can be set manually.
+    """
+
+    if not isinstance(sequences, (bytearray, bytes)):
+        raise TypeError(
+            "Expected object of type bytes or bytearray, got: {}".format(
+                type(sequences)
+            )
+        )
+
+    if explain:
+        previous_logger_level: int = logger.level
+        logger.addHandler(explain_handler)
+        logger.setLevel(TRACE)
+
+    length: int = len(sequences)
+
+    if length == 0:
+        logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
+        if explain:  # Defensive: ensure exit path clean handler
+            logger.removeHandler(explain_handler)
+            logger.setLevel(previous_logger_level or logging.WARNING)
+        return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
+
+    if cp_isolation is not None:
+        logger.log(
+            TRACE,
+            "cp_isolation is set. use this flag for debugging purpose. "
+            "limited list of encoding allowed : %s.",
+            ", ".join(cp_isolation),
+        )
+        cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
+    else:
+        cp_isolation = []
+
+    if cp_exclusion is not None:
+        logger.log(
+            TRACE,
+            "cp_exclusion is set. use this flag for debugging purpose. "
+            "limited list of encoding excluded : %s.",
+            ", ".join(cp_exclusion),
+        )
+        cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
+    else:
+        cp_exclusion = []
+
+    if length <= (chunk_size * steps):
+        logger.log(
+            TRACE,
+            "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
+            steps,
+            chunk_size,
+            length,
+        )
+        steps = 1
+        chunk_size = length
+
+    if steps > 1 and length / steps < chunk_size:
+        chunk_size = int(length / steps)
+
+    is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE
+    is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE
+
+    if is_too_small_sequence:
+        logger.log(
+            TRACE,
+            "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
+                length
+            ),
+        )
+    elif is_too_large_sequence:
+        logger.log(
+            TRACE,
+            "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
+                length
+            ),
+        )
+
+    prioritized_encodings: list[str] = []
+
+    specified_encoding: str | None = (
+        any_specified_encoding(sequences) if preemptive_behaviour else None
+    )
+
+    if specified_encoding is not None:
+        prioritized_encodings.append(specified_encoding)
+        logger.log(
+            TRACE,
+            "Detected declarative mark in sequence. Priority +1 given for %s.",
+            specified_encoding,
+        )
+
+    tested: set[str] = set()
+    tested_but_hard_failure: list[str] = []
+    tested_but_soft_failure: list[str] = []
+
+    fallback_ascii: CharsetMatch | None = None
+    fallback_u8: CharsetMatch | None = None
+    fallback_specified: CharsetMatch | None = None
+
+    results: CharsetMatches = CharsetMatches()
+
+    early_stop_results: CharsetMatches = CharsetMatches()
+
+    sig_encoding, sig_payload = identify_sig_or_bom(sequences)
+
+    if sig_encoding is not None:
+        prioritized_encodings.append(sig_encoding)
+        logger.log(
+            TRACE,
+            "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: str | None = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + if encoding_iana in {"utf_7"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + ( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)] + ), + encoding=encoding_iana, + ) + else: + decoded_payload = str( + ( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :] + ), + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: list[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append( + mess_ratio( + chunk, + threshold, + explain is True and 1 <= len(cp_isolation) <= 2, + ) + ) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except ( + UnicodeDecodeError + ) as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + enable_fallback + and encoding_iana + in ["ascii", "utf_8", specified_encoding, "utf_16", "utf_32"] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, + encoding_iana, + threshold, + bom_or_sig_available, + [], + decoded_payload, + preemptive_declaration=specified_encoding, + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. 
Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: list[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, + language_threshold, + ",".join(target_languages) if target_languages else None, + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + current_match = CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + ( + decoded_payload + if ( + is_too_large_sequence is False + or encoding_iana in [specified_encoding, "ascii", "utf_8"] + ) + else None + ), + preemptive_declaration=specified_encoding, + ) + + results.append(current_match) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + # If md says nothing to worry about, then... stop immediately! + if mean_mess_ratio == 0.0: + logger.debug( + "Encoding detection: %s is most likely the one.", + current_match.encoding, + ) + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([current_match]) + + early_stop_results.append(current_match) + + if ( + len(early_stop_results) + and (specified_encoding is None or specified_encoding in tested) + and "ascii" in tested + and "utf_8" in tested + ): + probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] + logger.debug( + "Encoding detection: %s is most likely the one.", + probable_result.encoding, + ) + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return CharsetMatches([probable_result]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.",
+            )
+
+        if fallback_specified:
+            logger.debug(
+                "Encoding detection: %s will be used as a fallback match",
+                fallback_specified.encoding,
+            )
+            results.append(fallback_specified)
+        elif (
+            (fallback_u8 and fallback_ascii is None)
+            or (
+                fallback_u8
+                and fallback_ascii
+                and fallback_u8.fingerprint != fallback_ascii.fingerprint
+            )
+            or (fallback_u8 is not None)
+        ):
+            logger.debug("Encoding detection: utf_8 will be used as a fallback match")
+            results.append(fallback_u8)
+        elif fallback_ascii:
+            logger.debug("Encoding detection: ascii will be used as a fallback match")
+            results.append(fallback_ascii)
+
+    if results:
+        logger.debug(
+            "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+            results.best().encoding,  # type: ignore
+            len(results) - 1,
+        )
+    else:
+        logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
+    if explain:
+        logger.removeHandler(explain_handler)
+        logger.setLevel(previous_logger_level)
+
+    return results
+
+
+def from_fp(
+    fp: BinaryIO,
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = True,
+) -> CharsetMatches:
+    """
+    Same thing as the function from_bytes, but using a file pointer that is already ready.
+    Will not close the file pointer.
+    """
+    return from_bytes(
+        fp.read(),
+        steps,
+        chunk_size,
+        threshold,
+        cp_isolation,
+        cp_exclusion,
+        preemptive_behaviour,
+        explain,
+        language_threshold,
+        enable_fallback,
+    )
+
+
+def from_path(
+    path: str | bytes | PathLike,  # type: ignore[type-arg]
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = True,
+) -> CharsetMatches:
+    """
+    Same thing as the function from_bytes, but with one extra step: opening and reading the given file path in binary mode.
+    Can raise IOError.
+    """
+    with open(path, "rb") as fp:
+        return from_fp(
+            fp,
+            steps,
+            chunk_size,
+            threshold,
+            cp_isolation,
+            cp_exclusion,
+            preemptive_behaviour,
+            explain,
+            language_threshold,
+            enable_fallback,
+        )
+
+
+def is_binary(
+    fp_or_path_or_payload: PathLike | str | BinaryIO | bytes,  # type: ignore[type-arg]
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = False,
+) -> bool:
+    """
+    Detect if the given input (file, bytes, or path) points to a binary file, i.e. not a string.
+    Based on the same main heuristic algorithms and default kwargs, with the sole exception that fallback matches
+    are disabled to be stricter around content that is ASCII-compatible but unlikely to be a string.
+    """
+    if isinstance(fp_or_path_or_payload, (str, PathLike)):
+        guesses = from_path(
+            fp_or_path_or_payload,
+            steps=steps,
+            chunk_size=chunk_size,
+            threshold=threshold,
+            cp_isolation=cp_isolation,
+            cp_exclusion=cp_exclusion,
+            preemptive_behaviour=preemptive_behaviour,
+            explain=explain,
+            language_threshold=language_threshold,
+            enable_fallback=enable_fallback,
+        )
+    elif isinstance(
+        fp_or_path_or_payload,
+        (
+            bytes,
+            bytearray,
+        ),
+    ):
+        guesses = from_bytes(
+            fp_or_path_or_payload,
+            steps=steps,
+            chunk_size=chunk_size,
+            threshold=threshold,
+            cp_isolation=cp_isolation,
+            cp_exclusion=cp_exclusion,
+            preemptive_behaviour=preemptive_behaviour,
+            explain=explain,
+            language_threshold=language_threshold,
+            enable_fallback=enable_fallback,
+        )
+    else:
+        guesses = from_fp(
+            fp_or_path_or_payload,
+            steps=steps,
+            chunk_size=chunk_size,
+            threshold=threshold,
+            cp_isolation=cp_isolation,
+            cp_exclusion=cp_exclusion,
+            preemptive_behaviour=preemptive_behaviour,
+            explain=explain,
+            language_threshold=language_threshold,
+            enable_fallback=enable_fallback,
+        )
+
+    return not guesses
diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/cd.py b/venv/lib/python3.13/site-packages/charset_normalizer/cd.py
new file mode 100644
index 0000000000000000000000000000000000000000..71a3ed5197f788135bb700a15a64594780aa6337
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/charset_normalizer/cd.py
@@ -0,0 +1,395 @@
+from __future__ import annotations
+
+import importlib
+from codecs import IncrementalDecoder
+from collections import Counter
+from functools import lru_cache
+from typing import Counter as TypeCounter
+
+from .constant import (
+    FREQUENCIES,
+    KO_NAMES,
+    LANGUAGE_SUPPORTED_COUNT,
+    TOO_SMALL_SEQUENCE,
+    ZH_NAMES,
+)
+from .md import is_suspiciously_successive_range
+from .models import CoherenceMatches
+from .utils import (
+    is_accentuated,
+    is_latin,
+    is_multi_byte_encoding,
+    is_unicode_range_secondary,
+    unicode_range,
+)
+
+
+def encoding_unicode_range(iana_name: str) -> list[str]:
+    """
+    Return associated unicode ranges in a single byte code page.
+    """
+    if is_multi_byte_encoding(iana_name):
+        raise OSError("Function not supported on multi-byte code page")
+
+    decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder
+
+    p: IncrementalDecoder = decoder(errors="ignore")
+    seen_ranges: dict[str, int] = {}
+    character_count: int = 0
+
+    for i in range(0x40, 0xFF):
+        chunk: str = p.decode(bytes([i]))
+
+        if chunk:
+            character_range: str | None = unicode_range(chunk)
+
+            if character_range is None:
+                continue
+
+            if is_unicode_range_secondary(character_range) is False:
+                if character_range not in seen_ranges:
+                    seen_ranges[character_range] = 0
+                seen_ranges[character_range] += 1
+                character_count += 1
+
+    return sorted(
+        [
+            character_range
+            for character_range in seen_ranges
+            if seen_ranges[character_range] / character_count >= 0.15
+        ]
+    )
+
+
+def unicode_range_languages(primary_range: str) -> list[str]:
+    """
+    Return inferred languages used with a unicode range.
+    """
+    languages: list[str] = []
+
+    for language, characters in FREQUENCIES.items():
+        for character in characters:
+            if unicode_range(character) == primary_range:
+                languages.append(language)
+                break
+
+    return languages
+
+
+@lru_cache()
+def encoding_languages(iana_name: str) -> list[str]:
+    """
+    Single-byte encoding language association. Some code pages are heavily linked to particular language(s).
+    This function does the correspondence.
+    """
+    unicode_ranges: list[str] = encoding_unicode_range(iana_name)
+    primary_range: str | None = None
+
+    for specified_range in unicode_ranges:
+        if "Latin" not in specified_range:
+            primary_range = specified_range
+            break
+
+    if primary_range is None:
+        return ["Latin Based"]
+
+    return unicode_range_languages(primary_range)
+
+
+@lru_cache()
+def mb_encoding_languages(iana_name: str) -> list[str]:
+    """
+    Multi-byte encoding language association. Some code pages are heavily linked to particular language(s).
+    This function does the correspondence.
+    """
+    if (
+        iana_name.startswith("shift_")
+        or iana_name.startswith("iso2022_jp")
+        or iana_name.startswith("euc_j")
+        or iana_name == "cp932"
+    ):
+        return ["Japanese"]
+    if iana_name.startswith("gb") or iana_name in ZH_NAMES:
+        return ["Chinese"]
+    if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
+        return ["Korean"]
+
+    return []
+
+
+@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
+def get_target_features(language: str) -> tuple[bool, bool]:
+    """
+    Determine main aspects of a supported language: whether it contains accents and whether it is pure Latin.
+    """
+    target_have_accents: bool = False
+    target_pure_latin: bool = True
+
+    for character in FREQUENCIES[language]:
+        if not target_have_accents and is_accentuated(character):
+            target_have_accents = True
+        if target_pure_latin and is_latin(character) is False:
+            target_pure_latin = False
+
+    return target_have_accents, target_pure_latin
+
+
+def alphabet_languages(
+    characters: list[str], ignore_non_latin: bool = False
+) -> list[str]:
+    """
+    Return the languages associated with the given characters.
+    """
+    languages: list[tuple[str, float]] = []
+
+    source_have_accents = any(is_accentuated(character) for character in characters)
+
+    for language, language_characters in FREQUENCIES.items():
+        target_have_accents, target_pure_latin = get_target_features(language)
+
+        if ignore_non_latin and target_pure_latin is False:
+            continue
+
+        if target_have_accents is False and source_have_accents:
+            continue
+
+        character_count: int = len(language_characters)
+
+        character_match_count: int = len(
+            [c for c in language_characters if c in characters]
+        )
+
+        ratio: float = character_match_count / character_count
+
+        if ratio >= 0.2:
+            languages.append((language, ratio))
+
+    languages = sorted(languages, key=lambda x: x[1], reverse=True)
+
+    return [compatible_language[0] for compatible_language in languages]
+
+
+def characters_popularity_compare(
+    language: str, ordered_characters: list[str]
+) -> float:
+    """
+    Determine if an ordered character list (by occurrence, from most frequent to rarest) matches a particular language.
+    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
+    Beware that this function is not strict on the match, in order to ease detection. (Meaning a close match is 1.)
+    """
+    if language not in FREQUENCIES:
+        raise ValueError(f"{language} not available")
+
+    character_approved_count: int = 0
+    FREQUENCIES_language_set = set(FREQUENCIES[language])
+
+    ordered_characters_count: int = len(ordered_characters)
+    target_language_characters_count: int = len(FREQUENCIES[language])
+
+    large_alphabet: bool = target_language_characters_count > 26
+
+    for character, character_rank in zip(
+        ordered_characters, range(0, ordered_characters_count)
+    ):
+        if character not in FREQUENCIES_language_set:
+            continue
+
+        character_rank_in_language: int = FREQUENCIES[language].index(character)
+        expected_projection_ratio: float = (
+            target_language_characters_count / ordered_characters_count
+        )
+        character_rank_projection: int = int(character_rank * expected_projection_ratio)
+
+        if (
+            large_alphabet is False
+            and abs(character_rank_projection - character_rank_in_language) > 4
+        ):
+            continue
+
+        if (
+            large_alphabet is True
+            and abs(character_rank_projection - character_rank_in_language)
+            < target_language_characters_count / 3
+        ):
+            character_approved_count += 1
+            continue
+
+        characters_before_source: list[str] = FREQUENCIES[language][
+            0:character_rank_in_language
+        ]
+        characters_after_source: list[str] = FREQUENCIES[language][
+            character_rank_in_language:
+        ]
+        characters_before: list[str] = ordered_characters[0:character_rank]
+        characters_after: list[str] = ordered_characters[character_rank:]
+
+        before_match_count: int = len(
+            set(characters_before) & set(characters_before_source)
+        )
+
+        after_match_count: int = len(
+            set(characters_after) & set(characters_after_source)
+        )
+
+        if len(characters_before_source) == 0 and before_match_count <= 4:
+            character_approved_count += 1
+            continue
+
+        if len(characters_after_source) == 0 and after_match_count <= 4:
+            character_approved_count += 1
+            continue
+
+        if (
+            before_match_count / len(characters_before_source) >= 0.4
+            or after_match_count / len(characters_after_source) >= 0.4
+        ):
+            character_approved_count += 1
+            continue
+
+    return character_approved_count / len(ordered_characters)
+
+
+def alpha_unicode_split(decoded_sequence: str) -> list[str]:
+    """
+    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
+    Ex. a text containing English/Latin with a bit of Hebrew will return two items in the resulting list;
+    one containing the Latin letters and the other the Hebrew.
+    """
+    layers: dict[str, str] = {}
+
+    for character in decoded_sequence:
+        if character.isalpha() is False:
+            continue
+
+        character_range: str | None = unicode_range(character)
+
+        if character_range is None:
+            continue
+
+        layer_target_range: str | None = None
+
+        for discovered_range in layers:
+            if (
+                is_suspiciously_successive_range(discovered_range, character_range)
+                is False
+            ):
+                layer_target_range = discovered_range
+                break
+
+        if layer_target_range is None:
+            layer_target_range = character_range
+
+        if layer_target_range not in layers:
+            layers[layer_target_range] = character.lower()
+            continue
+
+        layers[layer_target_range] += character.lower()
+
+    return list(layers.values())
+
+
+def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches:
+    """
+    This function merges results previously given by the function coherence_ratio.
+    The return type is the same as coherence_ratio.
+ """ + per_language_ratios: dict[str, list[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: + """ + We shall NOT return "English—" in CoherenceMatches because it is an alternative + of "English". This function only keeps the best match and remove the em-dash in it. + """ + index_results: dict[str, list[float]] = dict() + + for result in results: + language, ratio = result + no_em_name: str = language.replace("—", "") + + if no_em_name not in index_results: + index_results[no_em_name] = [] + + index_results[no_em_name].append(ratio) + + if any(len(index_results[e]) > 1 for e in index_results): + filtered_results: CoherenceMatches = [] + + for language in index_results: + filtered_results.append((language, max(index_results[language]))) + + return filtered_results + + return results + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: list[tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: list[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted( + filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True + ) diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/constant.py b/venv/lib/python3.13/site-packages/charset_normalizer/constant.py new file mode 100644 index 0000000000000000000000000000000000000000..cc71a019c7fdd7b99c7fdd5b6bb084ab2840ff63 --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/constant.py @@ -0,0 +1,2015 @@ +from __future__ import annotations + +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE +from re import compile as re_compile + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: dict[str, bytes | list[bytes]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + 
b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 + +# Up-to-date Unicode ucd/15.0.0 +UNICODE_RANGES_COMBINED: dict[str, range] = { + "Control character": range(32), + "Basic Latin": range(32, 128), + "Latin-1 Supplement": range(128, 256), + "Latin Extended-A": range(256, 384), + "Latin Extended-B": range(384, 592), + "IPA Extensions": range(592, 688), + "Spacing Modifier Letters": range(688, 768), + "Combining Diacritical Marks": range(768, 880), + "Greek and Coptic": range(880, 1024), + "Cyrillic": range(1024, 1280), + "Cyrillic Supplement": range(1280, 1328), + "Armenian": range(1328, 1424), + "Hebrew": range(1424, 1536), + "Arabic": range(1536, 1792), + "Syriac": range(1792, 1872), + "Arabic Supplement": range(1872, 1920), + "Thaana": range(1920, 1984), + "NKo": range(1984, 2048), + "Samaritan": range(2048, 2112), + "Mandaic": range(2112, 2144), + "Syriac Supplement": range(2144, 2160), + "Arabic Extended-B": range(2160, 2208), + "Arabic Extended-A": range(2208, 2304), + "Devanagari": range(2304, 2432), + "Bengali": range(2432, 2560), + "Gurmukhi": range(2560, 2688), + "Gujarati": range(2688, 2816), + "Oriya": range(2816, 2944), + "Tamil": range(2944, 3072), + "Telugu": range(3072, 3200), + "Kannada": range(3200, 3328), + "Malayalam": range(3328, 3456), + "Sinhala": range(3456, 3584), + "Thai": range(3584, 3712), + "Lao": range(3712, 3840), + "Tibetan": range(3840, 4096), + "Myanmar": range(4096, 4256), + "Georgian": range(4256, 4352), + "Hangul Jamo": range(4352, 4608), + "Ethiopic": range(4608, 4992), + "Ethiopic Supplement": range(4992, 5024), + "Cherokee": range(5024, 5120), + "Unified Canadian Aboriginal Syllabics": range(5120, 5760), + "Ogham": range(5760, 5792), + "Runic": range(5792, 5888), + "Tagalog": range(5888, 5920), + "Hanunoo": range(5920, 5952), + "Buhid": range(5952, 5984), + "Tagbanwa": range(5984, 6016), + "Khmer": range(6016, 6144), + "Mongolian": range(6144, 6320), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), + "Limbu": range(6400, 6480), + "Tai Le": range(6480, 6528), + "New Tai Lue": range(6528, 6624), + "Khmer Symbols": range(6624, 6656), + "Buginese": range(6656, 6688), + "Tai Tham": range(6688, 6832), + "Combining Diacritical Marks Extended": range(6832, 6912), + "Balinese": range(6912, 7040), + "Sundanese": range(7040, 7104), + "Batak": range(7104, 7168), + "Lepcha": range(7168, 7248), + "Ol Chiki": range(7248, 7296), + "Cyrillic Extended-C": range(7296, 7312), + "Georgian Extended": range(7312, 7360), + "Sundanese Supplement": range(7360, 7376), + "Vedic Extensions": range(7376, 7424), + "Phonetic Extensions": range(7424, 7552), + "Phonetic Extensions Supplement": range(7552, 7616), + "Combining Diacritical Marks Supplement": range(7616, 7680), + "Latin Extended Additional": range(7680, 7936), + "Greek Extended": range(7936, 8192), + "General Punctuation": range(8192, 8304), + "Superscripts and Subscripts": range(8304, 8352), + "Currency Symbols": range(8352, 8400), + "Combining Diacritical Marks for Symbols": range(8400, 8448), + "Letterlike Symbols": range(8448, 8528), + "Number Forms": range(8528, 8592), + "Arrows": range(8592, 8704), + "Mathematical Operators": range(8704, 8960), + "Miscellaneous Technical": range(8960, 9216), + "Control Pictures": range(9216, 9280), + 
"Optical Character Recognition": range(9280, 9312), + "Enclosed Alphanumerics": range(9312, 9472), + "Box Drawing": range(9472, 9600), + "Block Elements": range(9600, 9632), + "Geometric Shapes": range(9632, 9728), + "Miscellaneous Symbols": range(9728, 9984), + "Dingbats": range(9984, 10176), + "Miscellaneous Mathematical Symbols-A": range(10176, 10224), + "Supplemental Arrows-A": range(10224, 10240), + "Braille Patterns": range(10240, 10496), + "Supplemental Arrows-B": range(10496, 10624), + "Miscellaneous Mathematical Symbols-B": range(10624, 10752), + "Supplemental Mathematical Operators": range(10752, 11008), + "Miscellaneous Symbols and Arrows": range(11008, 11264), + "Glagolitic": range(11264, 11360), + "Latin Extended-C": range(11360, 11392), + "Coptic": range(11392, 11520), + "Georgian Supplement": range(11520, 11568), + "Tifinagh": range(11568, 11648), + "Ethiopic Extended": range(11648, 11744), + "Cyrillic Extended-A": range(11744, 11776), + "Supplemental Punctuation": range(11776, 11904), + "CJK Radicals Supplement": range(11904, 12032), + "Kangxi Radicals": range(12032, 12256), + "Ideographic Description Characters": range(12272, 12288), + "CJK Symbols and Punctuation": range(12288, 12352), + "Hiragana": range(12352, 12448), + "Katakana": range(12448, 12544), + "Bopomofo": range(12544, 12592), + "Hangul Compatibility Jamo": range(12592, 12688), + "Kanbun": range(12688, 12704), + "Bopomofo Extended": range(12704, 12736), + "CJK Strokes": range(12736, 12784), + "Katakana Phonetic Extensions": range(12784, 12800), + "Enclosed CJK Letters and Months": range(12800, 13056), + "CJK Compatibility": range(13056, 13312), + "CJK Unified Ideographs Extension A": range(13312, 19904), + "Yijing Hexagram Symbols": range(19904, 19968), + "CJK Unified Ideographs": range(19968, 40960), + "Yi Syllables": range(40960, 42128), + "Yi Radicals": range(42128, 42192), + "Lisu": range(42192, 42240), + "Vai": range(42240, 42560), + "Cyrillic Extended-B": range(42560, 42656), + "Bamum": range(42656, 42752), + "Modifier Tone Letters": range(42752, 42784), + "Latin Extended-D": range(42784, 43008), + "Syloti Nagri": range(43008, 43056), + "Common Indic Number Forms": range(43056, 43072), + "Phags-pa": range(43072, 43136), + "Saurashtra": range(43136, 43232), + "Devanagari Extended": range(43232, 43264), + "Kayah Li": range(43264, 43312), + "Rejang": range(43312, 43360), + "Hangul Jamo Extended-A": range(43360, 43392), + "Javanese": range(43392, 43488), + "Myanmar Extended-B": range(43488, 43520), + "Cham": range(43520, 43616), + "Myanmar Extended-A": range(43616, 43648), + "Tai Viet": range(43648, 43744), + "Meetei Mayek Extensions": range(43744, 43776), + "Ethiopic Extended-A": range(43776, 43824), + "Latin Extended-E": range(43824, 43888), + "Cherokee Supplement": range(43888, 43968), + "Meetei Mayek": range(43968, 44032), + "Hangul Syllables": range(44032, 55216), + "Hangul Jamo Extended-B": range(55216, 55296), + "High Surrogates": range(55296, 56192), + "High Private Use Surrogates": range(56192, 56320), + "Low Surrogates": range(56320, 57344), + "Private Use Area": range(57344, 63744), + "CJK Compatibility Ideographs": range(63744, 64256), + "Alphabetic Presentation Forms": range(64256, 64336), + "Arabic Presentation Forms-A": range(64336, 65024), + "Variation Selectors": range(65024, 65040), + "Vertical Forms": range(65040, 65056), + "Combining Half Marks": range(65056, 65072), + "CJK Compatibility Forms": range(65072, 65104), + "Small Form Variants": range(65104, 65136), + "Arabic Presentation 
Forms-B": range(65136, 65280), + "Halfwidth and Fullwidth Forms": range(65280, 65520), + "Specials": range(65520, 65536), + "Linear B Syllabary": range(65536, 65664), + "Linear B Ideograms": range(65664, 65792), + "Aegean Numbers": range(65792, 65856), + "Ancient Greek Numbers": range(65856, 65936), + "Ancient Symbols": range(65936, 66000), + "Phaistos Disc": range(66000, 66048), + "Lycian": range(66176, 66208), + "Carian": range(66208, 66272), + "Coptic Epact Numbers": range(66272, 66304), + "Old Italic": range(66304, 66352), + "Gothic": range(66352, 66384), + "Old Permic": range(66384, 66432), + "Ugaritic": range(66432, 66464), + "Old Persian": range(66464, 66528), + "Deseret": range(66560, 66640), + "Shavian": range(66640, 66688), + "Osmanya": range(66688, 66736), + "Osage": range(66736, 66816), + "Elbasan": range(66816, 66864), + "Caucasian Albanian": range(66864, 66928), + "Vithkuqi": range(66928, 67008), + "Linear A": range(67072, 67456), + "Latin Extended-F": range(67456, 67520), + "Cypriot Syllabary": range(67584, 67648), + "Imperial Aramaic": range(67648, 67680), + "Palmyrene": range(67680, 67712), + "Nabataean": range(67712, 67760), + "Hatran": range(67808, 67840), + "Phoenician": range(67840, 67872), + "Lydian": range(67872, 67904), + "Meroitic Hieroglyphs": range(67968, 68000), + "Meroitic Cursive": range(68000, 68096), + "Kharoshthi": range(68096, 68192), + "Old South Arabian": range(68192, 68224), + "Old North Arabian": range(68224, 68256), + "Manichaean": range(68288, 68352), + "Avestan": range(68352, 68416), + "Inscriptional Parthian": range(68416, 68448), + "Inscriptional Pahlavi": range(68448, 68480), + "Psalter Pahlavi": range(68480, 68528), + "Old Turkic": range(68608, 68688), + "Old Hungarian": range(68736, 68864), + "Hanifi Rohingya": range(68864, 68928), + "Rumi Numeral Symbols": range(69216, 69248), + "Yezidi": range(69248, 69312), + "Arabic Extended-C": range(69312, 69376), + "Old Sogdian": range(69376, 69424), + "Sogdian": range(69424, 69488), + "Old Uyghur": range(69488, 69552), + "Chorasmian": range(69552, 69600), + "Elymaic": range(69600, 69632), + "Brahmi": range(69632, 69760), + "Kaithi": range(69760, 69840), + "Sora Sompeng": range(69840, 69888), + "Chakma": range(69888, 69968), + "Mahajani": range(69968, 70016), + "Sharada": range(70016, 70112), + "Sinhala Archaic Numbers": range(70112, 70144), + "Khojki": range(70144, 70224), + "Multani": range(70272, 70320), + "Khudawadi": range(70320, 70400), + "Grantha": range(70400, 70528), + "Newa": range(70656, 70784), + "Tirhuta": range(70784, 70880), + "Siddham": range(71040, 71168), + "Modi": range(71168, 71264), + "Mongolian Supplement": range(71264, 71296), + "Takri": range(71296, 71376), + "Ahom": range(71424, 71504), + "Dogra": range(71680, 71760), + "Warang Citi": range(71840, 71936), + "Dives Akuru": range(71936, 72032), + "Nandinagari": range(72096, 72192), + "Zanabazar Square": range(72192, 72272), + "Soyombo": range(72272, 72368), + "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), + "Pau Cin Hau": range(72384, 72448), + "Devanagari Extended-A": range(72448, 72544), + "Bhaiksuki": range(72704, 72816), + "Marchen": range(72816, 72896), + "Masaram Gondi": range(72960, 73056), + "Gunjala Gondi": range(73056, 73136), + "Makasar": range(73440, 73472), + "Kawi": range(73472, 73568), + "Lisu Supplement": range(73648, 73664), + "Tamil Supplement": range(73664, 73728), + "Cuneiform": range(73728, 74752), + "Cuneiform Numbers and Punctuation": range(74752, 74880), + "Early Dynastic 
Cuneiform": range(74880, 75088), + "Cypro-Minoan": range(77712, 77824), + "Egyptian Hieroglyphs": range(77824, 78896), + "Egyptian Hieroglyph Format Controls": range(78896, 78944), + "Anatolian Hieroglyphs": range(82944, 83584), + "Bamum Supplement": range(92160, 92736), + "Mro": range(92736, 92784), + "Tangsa": range(92784, 92880), + "Bassa Vah": range(92880, 92928), + "Pahawh Hmong": range(92928, 93072), + "Medefaidrin": range(93760, 93856), + "Miao": range(93952, 94112), + "Ideographic Symbols and Punctuation": range(94176, 94208), + "Tangut": range(94208, 100352), + "Tangut Components": range(100352, 101120), + "Khitan Small Script": range(101120, 101632), + "Tangut Supplement": range(101632, 101760), + "Kana Extended-B": range(110576, 110592), + "Kana Supplement": range(110592, 110848), + "Kana Extended-A": range(110848, 110896), + "Small Kana Extension": range(110896, 110960), + "Nushu": range(110960, 111360), + "Duployan": range(113664, 113824), + "Shorthand Format Controls": range(113824, 113840), + "Znamenny Musical Notation": range(118528, 118736), + "Byzantine Musical Symbols": range(118784, 119040), + "Musical Symbols": range(119040, 119296), + "Ancient Greek Musical Notation": range(119296, 119376), + "Kaktovik Numerals": range(119488, 119520), + "Mayan Numerals": range(119520, 119552), + "Tai Xuan Jing Symbols": range(119552, 119648), + "Counting Rod Numerals": range(119648, 119680), + "Mathematical Alphanumeric Symbols": range(119808, 120832), + "Sutton SignWriting": range(120832, 121520), + "Latin Extended-G": range(122624, 122880), + "Glagolitic Supplement": range(122880, 122928), + "Cyrillic Extended-D": range(122928, 123024), + "Nyiakeng Puachue Hmong": range(123136, 123216), + "Toto": range(123536, 123584), + "Wancho": range(123584, 123648), + "Nag Mundari": range(124112, 124160), + "Ethiopic Extended-B": range(124896, 124928), + "Mende Kikakui": range(124928, 125152), + "Adlam": range(125184, 125280), + "Indic Siyaq Numbers": range(126064, 126144), + "Ottoman Siyaq Numbers": range(126208, 126288), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), + "Mahjong Tiles": range(126976, 127024), + "Domino Tiles": range(127024, 127136), + "Playing Cards": range(127136, 127232), + "Enclosed Alphanumeric Supplement": range(127232, 127488), + "Enclosed Ideographic Supplement": range(127488, 127744), + "Miscellaneous Symbols and Pictographs": range(127744, 128512), + "Emoticons range(Emoji)": range(128512, 128592), + "Ornamental Dingbats": range(128592, 128640), + "Transport and Map Symbols": range(128640, 128768), + "Alchemical Symbols": range(128768, 128896), + "Geometric Shapes Extended": range(128896, 129024), + "Supplemental Arrows-C": range(129024, 129280), + "Supplemental Symbols and Pictographs": range(129280, 129536), + "Chess Symbols": range(129536, 129648), + "Symbols and Pictographs Extended-A": range(129648, 129792), + "Symbols for Legacy Computing": range(129792, 130048), + "CJK Unified Ideographs Extension B": range(131072, 173792), + "CJK Unified Ideographs Extension C": range(173824, 177984), + "CJK Unified Ideographs Extension D": range(177984, 178208), + "CJK Unified Ideographs Extension E": range(178208, 183984), + "CJK Unified Ideographs Extension F": range(183984, 191472), + "CJK Compatibility Ideographs Supplement": range(194560, 195104), + "CJK Unified Ideographs Extension G": range(196608, 201552), + "CJK Unified Ideographs Extension H": range(201552, 205744), + "Tags": range(917504, 917632), + "Variation Selectors Supplement": 
range(917760, 918000), + "Supplementary Private Use Area-A": range(983040, 1048576), + "Supplementary Private Use Area-B": range(1048576, 1114112), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_NO_ALIASES = [ + "cp720", + "cp737", + "cp856", + "cp874", + "cp875", + "cp1006", + "koi8_r", + "koi8_t", + "koi8_u", +] + +IANA_SUPPORTED: list[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())) + IANA_NO_ALIASES, + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. +IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": 
"ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", + "(", + ")", +} + +# Sample character sets — replace with full lists if needed +COMMON_CHINESE_CHARACTERS = "的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞" + +COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨" + +COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生" + +# Combine all into a set +COMMON_CJK_CHARACTERS = set( + "".join( + [ + COMMON_CHINESE_CHARACTERS, + COMMON_JAPANESE_CHARACTERS, + COMMON_KOREAN_CHARACTERS, + ] + ) +) + +KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"} + +# Logging LEVEL below DEBUG +TRACE: int = 5 + + +# Language label that contain the em dash "—" +# character are to be considered alternative seq to origin +FREQUENCIES: dict[str, list[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "English—": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", 
+ "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + # Jap-Kanji + "Japanese": [ + "人", + "一", + "大", + "亅", + "丁", + "丨", + "竹", + "笑", + "口", + "日", + "今", + "二", + "彳", + "行", + "十", + "土", + "丶", + "寸", + "寺", + "時", + "乙", + "丿", + "乂", + "气", + "気", + "冂", + "巾", + "亠", + "市", + "目", + "儿", + "見", + "八", + "小", + "凵", + "県", + "月", + "彐", + "門", + "間", + "木", + "東", + "山", + "出", + "本", + "中", + "刀", + "分", + "耳", + "又", + "取", + "最", + "言", + "田", + "心", + "思", + "刂", + "前", + "京", + "尹", + "事", + "生", + "厶", + "云", + "会", + "未", + "来", + "白", + "冫", + "楽", + "灬", + "馬", + "尸", + "尺", + "駅", + "明", + "耂", + "者", + "了", + "阝", + "都", + "高", + "卜", + "占", + "厂", + "广", + "店", + "子", + "申", + "奄", + "亻", + "俺", + "上", + "方", + "冖", + "学", + "衣", + "艮", + "食", + "自", + ], + # Jap-Katakana + "Japanese—": [ + "ー", + "ン", + "ス", + "・", + "ル", + "ト", + "リ", + "イ", + "ア", + "ラ", + "ッ", + "ク", + "ド", + "シ", + "レ", + "ジ", + "タ", + "フ", + "ロ", + "カ", + "テ", + "マ", + "ィ", + "グ", + "バ", + "ム", + "プ", + "オ", + "コ", + "デ", + "ニ", + "ウ", + "メ", + "サ", + "ビ", + "ナ", + "ブ", + "ャ", + "エ", + "ュ", + "チ", + "キ", + "ズ", + "ダ", + "パ", + "ミ", + "ェ", + "ョ", + "ハ", + "セ", + "ベ", + "ガ", + "モ", + "ツ", + "ネ", + "ボ", + "ソ", + "ノ", + "ァ", + "ヴ", + "ワ", + "ポ", + "ペ", + "ピ", + "ケ", + "ゴ", + "ギ", + "ザ", + "ホ", + "ゲ", + "ォ", + "ヤ", + "ヒ", + "ユ", + "ヨ", + "ヘ", + "ゼ", + "ヌ", + "ゥ", + "ゾ", + "ヶ", + "ヂ", + "ヲ", + "ヅ", + "ヵ", + "ヱ", + "ヰ", + "ヮ", + "ヽ", + "゠", + "ヾ", + "ヷ", + "ヿ", + "ヸ", + "ヹ", + "ヺ", + ], + # Jap-Hiragana + "Japanese——": [ + "の", + "に", + "る", + "た", + "と", + "は", + "し", + "い", + "を", + "で", + "て", + "が", + "な", + "れ", + "か", + "ら", + "さ", + "っ", + "り", + "す", + "あ", + "も", + "こ", + "ま", + "う", + "く", + "よ", + "き", + "ん", + "め", + "お", + "け", + "そ", + "つ", + "だ", + "や", + "え", + "ど", + "わ", + "ち", + "み", + "せ", + "じ", + "ば", + "へ", + "び", + "ず", + "ろ", + "ほ", + "げ", + "む", + "べ", + "ひ", + "ょ", + "ゆ", + "ぶ", + "ご", + "ゃ", + "ね", + "ふ", + "ぐ", + "ぎ", + "ぼ", + "ゅ", + "づ", + "ざ", + "ぞ", + "ぬ", + "ぜ", + "ぱ", + "ぽ", + "ぷ", + "ぴ", + "ぃ", + "ぁ", + "ぇ", + "ぺ", + "ゞ", + "ぢ", + "ぉ", + "ぅ", + "ゐ", + "ゝ", + "ゑ", + "゛", + "゜", + "ゎ", + "ゔ", + "゚", + "ゟ", + "゙", + "ゕ", + "ゖ", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + "可", + "也", + "你", + "对", + "生", + "能", + "而", + "子", + "那", + "得", + "于", + "着", + "下", + "自", + "之", + "年", + "过", + "发", + "后", + "作", + "里", + "用", + "道", + "行", + "所", + "然", + "家", + "种", + "事", + "成", + "方", + "多", + "经", + "么", + "去", + "法", + "学", + "如", + "都", + "同", + "现", + "当", + "没", + "动", + "面", + 
"起", + "看", + "定", + "天", + "分", + "还", + "进", + "好", + "小", + "部", + "其", + "些", + "主", + "样", + "理", + "心", + "她", + "本", + "前", + "开", + "但", + "因", + "只", + "从", + "想", + "实", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + 
"h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/legacy.py b/venv/lib/python3.13/site-packages/charset_normalizer/legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..360a3107bdce817594db33de158e71d9fe793a3b --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/legacy.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any +from warnings import warn + +from .api import from_bytes +from .constant import CHARDET_CORRESPONDENCE, TOO_SMALL_SEQUENCE + +# TODO: remove this check when dropping Python 3.7 support +if TYPE_CHECKING: + from typing_extensions import TypedDict + + class ResultDict(TypedDict): + encoding: str | None + language: str + confidence: float | None + + +def detect( + byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any +) -> ResultDict: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? 
+ """ + if len(kwargs): + warn( + f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" + ) + + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + f"Expected object of type bytes or bytearray, got: {type(byte_str)}" + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # automatically lower confidence + # on small bytes samples. + # https://github.com/jawah/charset_normalizer/issues/391 + if ( + confidence is not None + and confidence >= 0.9 + and encoding + not in { + "utf_8", + "ascii", + } + and r.bom is False # type: ignore[union-attr] + and len(byte_str) < TOO_SMALL_SEQUENCE + ): + confidence -= 0.2 + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. + if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: + encoding = CHARDET_CORRESPONDENCE[encoding] + + return { + "encoding": encoding, + "language": language, + "confidence": confidence, + } diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/md.cpython-313-x86_64-linux-gnu.so b/venv/lib/python3.13/site-packages/charset_normalizer/md.cpython-313-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..857d7474b7a5a0c5b81989346cf9d845174eed68 Binary files /dev/null and b/venv/lib/python3.13/site-packages/charset_normalizer/md.cpython-313-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/md.py b/venv/lib/python3.13/site-packages/charset_normalizer/md.py new file mode 100644 index 0000000000000000000000000000000000000000..12ce024bd34c7eee369ddf438f5534e855a59839 --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/md.py @@ -0,0 +1,635 @@ +from __future__ import annotations + +from functools import lru_cache +from logging import getLogger + +from .constant import ( + COMMON_SAFE_ASCII_CHARACTERS, + TRACE, + UNICODE_SECONDARY_RANGE_KEYWORD, +) +from .utils import ( + is_accentuated, + is_arabic, + is_arabic_isolated_form, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, + is_cjk_uncommon, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. + """ + + def eligible(self, character: str) -> bool: + """ + Determine if given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in witch the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Permit to reset the plugin to the initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.; No restriction gt 0. 
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: str | None = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # Abstract + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # Abstract + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: str | None = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # Abstract + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: str | None = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: str | None = unicode_range(self._last_printable_seen) + unicode_range_b: str | None = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count <= 13: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + self._buffer_glyph_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + if ( + is_cjk(character) + or is_hangul(character) + or is_katakana(character) + or is_hiragana(character) + or is_thai(character) + ): + self._buffer_glyph_count += 1 + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length >= 0.5: + self._is_current_word_bad = True + # Word/Buffer ending with an upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
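+                # e.g. a word ending in "É" while the word is not fully uppercase.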
+ elif ( + is_accentuated(self._buffer[-1]) + and self._buffer[-1].isupper() + and all(_.isupper() for _ in self._buffer) is False + ): + self._foreign_long_count += 1 + self._is_current_word_bad = True + elif self._buffer_glyph_count == 1: + self._is_current_word_bad = True + self._foreign_long_count += 1 + if buffer_length >= 24 and self._foreign_long_watch: + camel_case_dst = [ + i + for c, i in zip(self._buffer, range(0, buffer_length)) + if c.isupper() + ] + probable_camel_cased: bool = False + + if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): + probable_camel_cased = True + + if not probable_camel_cased: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + self._buffer_glyph_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # Abstract + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkUncommonPlugin(MessDetectorPlugin): + """ + Detect messy CJK text that probably means nothing. + """ + + def __init__(self) -> None: + self._character_count: int = 0 + self._uncommon_count: int = 0 + + def eligible(self, character: str) -> bool: + return is_cjk(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_cjk_uncommon(character): + self._uncommon_count += 1 + return + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._uncommon_count = 0 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + uncommon_form_usage: float = self._uncommon_count / self._character_count + + # we can be pretty sure it's garbage when uncommon characters are widely + # used. otherwise it could just be traditional chinese for example. 
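+        # dividing by 10 keeps the penalty modest: even 100% uncommon usage only contributes 0.1.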
+ return uncommon_form_usage / 10 if uncommon_form_usage > 0.5 else 0.0 + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: str | None = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and character.isascii() is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +class ArabicIsolatedFormPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._isolated_form_count: int = 0 + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._isolated_form_count = 0 + + def eligible(self, character: str) -> bool: + return is_arabic(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_arabic_isolated_form(character): + self._isolated_form_count += 1 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + isolated_form_usage: float = self._isolated_form_count / self._character_count + + return isolated_form_usage + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: str | None, unicode_range_b: str | None +) -> bool: + """ + Determine if two Unicode range seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. 
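+    # e.g. "e" (Basic Latin) followed by U+0301 (Combining Diacritical Marks) must not be flagged.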
+ if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = ( + unicode_range_a.split(" "), + unicode_range_b.split(" "), + ) + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. + if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. + """ + + detectors: list[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + logger = getLogger("charset_normalizer") + + logger.log( + TRACE, + "Mess-detector extended-analysis start. 
" + f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " + f"maximum_threshold={maximum_threshold}", + ) + + if len(decoded_sequence) > 16: + logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") + logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") + + for dt in detectors: + logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") + + return round(mean_mess_ratio, 3) diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/models.py b/venv/lib/python3.13/site-packages/charset_normalizer/models.py new file mode 100644 index 0000000000000000000000000000000000000000..1042758f873d2a54f7078c5411b17ffc11dca4ee --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/models.py @@ -0,0 +1,360 @@ +from __future__ import annotations + +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from re import sub +from typing import Any, Iterator, List, Tuple + +from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: CoherenceMatches, + decoded_payload: str | None = None, + preemptive_declaration: str | None = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: list[str] | None = None + + self._leaves: list[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: bytes | None = None + self._output_encoding: str | None = None + + self._string: str | None = decoded_payload + + self._preemptive_declaration: str | None = preemptive_declaration + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + if isinstance(other, str): + return iana_name(other) == self.encoding + return False + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Below 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + return self.coherence > other.coherence + elif chaos_difference < 0.01 and coherence_difference <= 0.02: + # When having a difficult decision, use the result that decoded as many multi-byte as possible. + # preserve RAM usage! 
+            if len(self._payload) >= TOO_BIG_SEQUENCE:
+                return self.chaos < other.chaos
+            return self.multi_byte_usage > other.multi_byte_usage
+
+        return self.chaos < other.chaos
+
+    @property
+    def multi_byte_usage(self) -> float:
+        return 1.0 - (len(str(self)) / len(self.raw))
+
+    def __str__(self) -> str:
+        # Lazy Str Loading
+        if self._string is None:
+            self._string = str(self._payload, self._encoding, "strict")
+        return self._string
+
+    def __repr__(self) -> str:
+        return f"<CharsetMatch '{self.encoding}' bytes({self.fingerprint})>"
+
+    def add_submatch(self, other: CharsetMatch) -> None:
+        if not isinstance(other, CharsetMatch) or other == self:
+            raise ValueError(
+                "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
+                    other.__class__
+                )
+            )
+
+        other._string = None  # Unload RAM usage; dirty trick.
+        self._leaves.append(other)
+
+    @property
+    def encoding(self) -> str:
+        return self._encoding
+
+    @property
+    def encoding_aliases(self) -> list[str]:
+        """
+        An encoding is known by many names; this can help, e.g. when searching for IBM855 while it is listed as CP855.
+        """
+        also_known_as: list[str] = []
+        for u, p in aliases.items():
+            if self.encoding == u:
+                also_known_as.append(p)
+            elif self.encoding == p:
+                also_known_as.append(u)
+        return also_known_as
+
+    @property
+    def bom(self) -> bool:
+        return self._has_sig_or_bom
+
+    @property
+    def byte_order_mark(self) -> bool:
+        return self._has_sig_or_bom
+
+    @property
+    def languages(self) -> list[str]:
+        """
+        Return the complete list of possible languages found in decoded sequence.
+        Usually not really useful. The returned list may be empty even if the 'language' property returns something != 'Unknown'.
+        """
+        return [e[0] for e in self._languages]
+
+    @property
+    def language(self) -> str:
+        """
+        Most probable language found in decoded sequence. If none were detected or inferred, the property will return
+        "Unknown".
+        """
+        if not self._languages:
+            # Trying to infer the language based on the given encoding
+            # It's either English or we should not pronounce ourselves in certain cases.
+            if "ascii" in self.could_be_from_charset:
+                return "English"
+
+            # doing it there to avoid circular import
+            from charset_normalizer.cd import encoding_languages, mb_encoding_languages
+
+            languages = (
+                mb_encoding_languages(self.encoding)
+                if is_multi_byte_encoding(self.encoding)
+                else encoding_languages(self.encoding)
+            )
+
+            if len(languages) == 0 or "Latin Based" in languages:
+                return "Unknown"
+
+            return languages[0]
+
+        return self._languages[0][0]
+
+    @property
+    def chaos(self) -> float:
+        return self._mean_mess_ratio
+
+    @property
+    def coherence(self) -> float:
+        if not self._languages:
+            return 0.0
+        return self._languages[0][1]
+
+    @property
+    def percent_chaos(self) -> float:
+        return round(self.chaos * 100, ndigits=3)
+
+    @property
+    def percent_coherence(self) -> float:
+        return round(self.coherence * 100, ndigits=3)
+
+    @property
+    def raw(self) -> bytes:
+        """
+        Original untouched bytes.
+ """ + return self._payload + + @property + def submatch(self) -> list[CharsetMatch]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> list[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> list[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + decoded_string = str(self) + if ( + self._preemptive_declaration is not None + and self._preemptive_declaration.lower() + not in ["utf-8", "utf8", "utf_8"] + ): + patched_header = sub( + RE_POSSIBLE_ENCODING_INDICATION, + lambda m: m.string[m.span()[0] : m.span()[1]].replace( + m.groups()[0], + iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type] + ), + decoded_string[:8192], + count=1, + ) + + decoded_string = patched_header + decoded_string[8192:] + + self._output_payload = decoded_string.encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: list[CharsetMatch] | None = None): + self._results: list[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: int | str) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. 
+ """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) < TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> CharsetMatch | None: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> CharsetMatch | None: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: str | None, + encoding_aliases: list[str], + alternative_encodings: list[str], + language: str, + alphabets: list[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: str | None, + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: str | None = unicode_path + self.encoding: str | None = encoding + self.encoding_aliases: list[str] = encoding_aliases + self.alternative_encodings: list[str] = alternative_encodings + self.language: str = language + self.alphabets: list[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/py.typed b/venv/lib/python3.13/site-packages/charset_normalizer/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/utils.py b/venv/lib/python3.13/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6bf0384ccf94216f79881674f54182a93e17def0 --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/utils.py @@ -0,0 +1,414 @@ +from __future__ import annotations + +import importlib +import logging +import unicodedata +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator + +from _multibytecodec import ( # type: ignore[import-not-found,import] + MultibyteIncrementalDecoder, +) + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, + COMMON_CJK_CHARACTERS, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = 
unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + or "WITH MACRON" in description + or "WITH RING ABOVE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: list[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> str | None: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: str | None = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: str | None = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range and character_category != "Lo" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: str | None = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range or "Pictographs" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? 
+ return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "ARABIC" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic_isolated_form(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "ARABIC" in character_name and "ISOLATED FORM" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk_uncommon(character: str) -> bool: + return character not in COMMON_CJK_CHARACTERS + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_unprintable(character: str) -> bool: + return ( + character.isspace() is False # includes \n \t \r \v + and character.isprintable() is False + and character != "\x1a" # Why? Its the ASCII substitute character. + and character != "\ufeff" # bug discovered in Python, + # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. + ) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len: int = len(sequence) + + results: list[str] = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module(f"encodings.{name}").IncrementalDecoder, + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]: + """ + Identify and extract SIG/BOM in given sequence. 
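+    Returns the matched encoding name and its mark bytes, or (None, b"") when no mark is present.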
+ """ + + for iana_encoding in ENCODING_MARKS: + marks: bytes | list[bytes] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + """Returns the Python normalized encoding name (Not the IANA official name).""" + cp_name = cp_name.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError(f"Unable to retrieve IANA for '{cp_name}'") + + return cp_name + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder + decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: str | None = None, +) -> Generator[str, None, None]: + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0: + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/venv/lib/python3.13/site-packages/charset_normalizer/version.py b/venv/lib/python3.13/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000000000000000000000000000000000000..c843e5331131fe69b5bba96d3f0c1160dcaa943b --- /dev/null +++ b/venv/lib/python3.13/site-packages/charset_normalizer/version.py @@ -0,0 +1,8 @@ +""" +Expose version +""" + +from __future__ import annotations + +__version__ = "3.4.4" +VERSION = __version__.split(".") diff --git a/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/METADATA b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..bef50192a5dff2da4576b44da78cca6cfed2987c --- /dev/null +++ b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/METADATA @@ -0,0 +1,42 @@ +Metadata-Version: 2.4 +Name: filelock +Version: 3.20.0 +Summary: A platform independent file lock. 
+Project-URL: Documentation, https://py-filelock.readthedocs.io +Project-URL: Homepage, https://github.com/tox-dev/py-filelock +Project-URL: Source, https://github.com/tox-dev/py-filelock +Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues +Maintainer-email: Bernát Gábor +License-Expression: Unlicense +License-File: LICENSE +Keywords: application,cache,directory,log,user +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: The Unlicense (Unlicense) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: System +Requires-Python: >=3.10 +Description-Content-Type: text/markdown + +# filelock + +[![PyPI](https://img.shields.io/pypi/v/filelock)](https://pypi.org/project/filelock/) +[![Supported Python +versions](https://img.shields.io/pypi/pyversions/filelock.svg)](https://pypi.org/project/filelock/) +[![Documentation +status](https://readthedocs.org/projects/py-filelock/badge/?version=latest)](https://py-filelock.readthedocs.io/en/latest/?badge=latest) +[![Code style: +black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Downloads](https://static.pepy.tech/badge/filelock/month)](https://pepy.tech/project/filelock) +[![check](https://github.com/tox-dev/py-filelock/actions/workflows/check.yaml/badge.svg)](https://github.com/tox-dev/py-filelock/actions/workflows/check.yaml) + +For more information checkout the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html). 
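+A minimal sketch of the lock API summarized above (file names here are illustrative, not taken from this package's docs):
+
+```python
+from filelock import FileLock, Timeout
+
+lock = FileLock("hotspot.txt.lock")  # hypothetical lock file guarding hotspot.txt
+
+try:
+    with lock.acquire(timeout=5):  # wait up to 5 seconds, then raise Timeout
+        with open("hotspot.txt", "a") as f:
+            f.write("one process at a time\n")
+except Timeout:
+    print("another process holds the lock")
+```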
diff --git a/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/RECORD b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..0eefb7fd3a3de6710dfd23b4cc6bcd4c90cfec30 --- /dev/null +++ b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/RECORD @@ -0,0 +1,24 @@ +filelock-3.20.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +filelock-3.20.0.dist-info/METADATA,sha256=gIghqdcbGNywxw52pN02_a9OxFqzhjA8v-9GsDWtNog,2110 +filelock-3.20.0.dist-info/RECORD,, +filelock-3.20.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +filelock-3.20.0.dist-info/licenses/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210 +filelock/__init__.py,sha256=_t_-OAGXo_qyPa9lNQ1YnzVYEvSW3I0onPqzpomsVVg,1769 +filelock/__pycache__/__init__.cpython-313.pyc,, +filelock/__pycache__/_api.cpython-313.pyc,, +filelock/__pycache__/_error.cpython-313.pyc,, +filelock/__pycache__/_soft.cpython-313.pyc,, +filelock/__pycache__/_unix.cpython-313.pyc,, +filelock/__pycache__/_util.cpython-313.pyc,, +filelock/__pycache__/_windows.cpython-313.pyc,, +filelock/__pycache__/asyncio.cpython-313.pyc,, +filelock/__pycache__/version.cpython-313.pyc,, +filelock/_api.py,sha256=2aATBeJ3-jtMj5OSm7EE539iNaTBsf13KXtcBMoi8oM,14545 +filelock/_error.py,sha256=-5jMcjTu60YAvAO1UbqDD1GIEjVkwr8xCFwDBtMeYDg,787 +filelock/_soft.py,sha256=haqtc_TB_KJbYv2a8iuEAclKuM4fMG1vTcp28sK919c,1711 +filelock/_unix.py,sha256=eGOs4gDgZ-5fGnJUz-OkJDeZkAMzgvYcD8hVD6XH7e4,2351 +filelock/_util.py,sha256=QHBoNFIYfbAThhotH3Q8E2acFc84wpG49-T-uu017ZE,1715 +filelock/_windows.py,sha256=8k4XIBl_zZVfGC2gz0kEr8DZBvpNa8wdU9qeM1YrBb8,2179 +filelock/asyncio.py,sha256=dSLe6XZSECFOgsVpcQUSh5Y5zAHxHGPu_tfpPX9I45k,12514 +filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +filelock/version.py,sha256=AW5MeEjK4TaQWWJrGb_AlBw8PlmFoIcn7GodG_AVSOM,706 diff --git a/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/WHEEL b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..12228d414b6cfed7c39d3781c85c63256a1d7fb5 --- /dev/null +++ b/venv/lib/python3.13/site-packages/filelock-3.20.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/METADATA b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..fe69ec98f6826b1530deccedfd48c96aec25c998 --- /dev/null +++ b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/METADATA @@ -0,0 +1,87 @@ +Metadata-Version: 2.4 +Name: hf-xet +Version: 1.2.0 +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Rust +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: 
Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Free Threading +Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Dist: pytest ; extra == 'tests' +Provides-Extra: tests +License-File: LICENSE +Summary: Fast transfer of large files with the Hugging Face Hub. +Maintainer-email: Rajat Arya , Jared Sulzdorf , Di Xiao , Assaf Vayner , Hoyt Koepke +License-Expression: Apache-2.0 +Requires-Python: >=3.8 +Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM +Project-URL: Homepage, https://github.com/huggingface/xet-core +Project-URL: Documentation, https://huggingface.co/docs/hub/en/storage-backends#using-xet-storage +Project-URL: Issues, https://github.com/huggingface/xet-core/issues +Project-URL: Repository, https://github.com/huggingface/xet-core.git + + +

+[badges: License | GitHub release | Contributor Covenant (badge and logo images not recovered)]
+
+### 🤗 hf-xet - xet client tech, used in huggingface_hub
+
+## Welcome
+
+`hf-xet` enables `huggingface_hub` to use xet storage for uploading to and downloading from the HF Hub. Xet storage provides chunk-based deduplication, efficient storage/retrieval with local disk caching, and backwards compatibility with Git LFS. This library is not meant to be used directly; it is intended to be used from [huggingface_hub](https://pypi.org/project/huggingface-hub).
+
+## Key features
+
+♻ **chunk-based deduplication implementation**: avoid transferring and storing chunks that are shared across binary files (models, datasets, etc.).
+
+🤗 **Python bindings**: bindings for the [huggingface_hub](https://github.com/huggingface/huggingface_hub/) package.
+
+↔ **network communications**: concurrent communication to HF Hub Xet backend services (CAS).
+
+🔖 **local disk caching**: chunk-based cache that sits alongside the existing [huggingface_hub disk cache](https://huggingface.co/docs/huggingface_hub/guides/manage-cache).
+
+## Installation
+
+Install the `hf_xet` package with [pip](https://pypi.org/project/hf-xet/):
+
+```bash
+pip install hf_xet
+```
+
+## Quick Start
+
+`hf_xet` is not intended to be run independently; it is expected to be used from `huggingface_hub`. To get started, check out the `huggingface_hub` documentation [here](https://hf.co/docs/huggingface_hub).
+
+## Contributions (feature requests, bugs, etc.) are encouraged & appreciated 💙💚💛💜🧡❤️
+
+Please join us in making hf-xet better. We value everyone's contributions. Code is not the only way to help: answering questions, helping each other, improving documentation, and filing issues all help immensely. If you are interested in contributing (please do!), check out the [contribution guide](https://github.com/huggingface/xet-core/blob/main/CONTRIBUTING.md) for this repository.
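+
+As a footnote to the Quick Start above, here is a hedged sketch of what "used from `huggingface_hub`" means in practice (the repo id is arbitrary; Xet is only used when `hf_xet` is installed and the repository is Xet-backed):
+
+```python
+# pip install huggingface_hub hf_xet
+from huggingface_hub import hf_hub_download
+
+# Nothing hf_xet-specific is needed in user code: huggingface_hub detects the
+# installed hf_xet package and routes transfers for Xet-backed repos through it,
+# getting chunk-based deduplication and the local chunk cache for free.
+path = hf_hub_download(repo_id="gpt2", filename="config.json")
+print(path)
+```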
diff --git a/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/RECORD b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..9a7f573615404b1679d4b3eb8ff427baaa6c484c --- /dev/null +++ b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/RECORD @@ -0,0 +1,8 @@ +hf_xet-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +hf_xet-1.2.0.dist-info/METADATA,sha256=U-3J7DnI-UycsH-OPV_q2_s3jhtJSkQYifQ03yS9ie8,4910 +hf_xet-1.2.0.dist-info/RECORD,, +hf_xet-1.2.0.dist-info/WHEEL,sha256=W1f4mZCUZH4n5LoWwHgwGsB1zJCLLADdZ7x6Gd7Z8X8,127 +hf_xet-1.2.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 +hf_xet/__init__.py,sha256=E8UDdyQ8glZ_nve9hHEf22bPang8-RKx4VuApXYeQUo,107 +hf_xet/__pycache__/__init__.cpython-313.pyc,, +hf_xet/hf_xet.abi3.so,sha256=vddURwHuQEUiJXuQlm1NZ47kOW5ck53KFdD32s2mDmY,8310504 diff --git a/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/WHEEL b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..9ce9fdfd77f7b3a2b3b0dfd90989a3e8d75fae2d --- /dev/null +++ b/venv/lib/python3.13/site-packages/hf_xet-1.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: maturin (1.9.6) +Root-Is-Purelib: false +Tag: cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64 diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/__init__.py b/venv/lib/python3.13/site-packages/huggingface_hub/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d2f240f4f1004cb9071fd6ca99973578f8bd560c --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/__init__.py @@ -0,0 +1,1554 @@ +# Copyright 2020 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# *********** +# `huggingface_hub` init has 2 modes: +# - Normal usage: +# If imported to use it, all modules and functions are lazy-loaded. This means +# they exist at top level in module but are imported only the first time they are +# used. This way, `from huggingface_hub import something` will import `something` +# quickly without the hassle of importing all the features from `huggingface_hub`. +# - Static check: +# If statically analyzed, all modules and functions are loaded normally. This way +# static typing check works properly as well as autocomplete in text editors and +# IDEs. +# +# The static model imports are done inside the `if TYPE_CHECKING:` statement at +# the bottom of this file. Since module/functions imports are duplicated, it is +# mandatory to make sure to add them twice when adding one. This is checked in the +# `make quality` command. +# +# To update the static imports, please run the following command and commit the changes. 
+# ``` +# # Use script +# python utils/check_static_imports.py --update-file +# +# # Or run style on codebase +# make style +# ``` +# +# *********** +# Lazy loader vendored from https://github.com/scientific-python/lazy_loader +import importlib +import os +import sys +from typing import TYPE_CHECKING + + +__version__ = "0.36.0" + +# Alphabetical order of definitions is ensured in tests +# WARNING: any comment added in this dictionary definition will be lost when +# re-generating the file ! +_SUBMOD_ATTRS = { + "_commit_scheduler": [ + "CommitScheduler", + ], + "_inference_endpoints": [ + "InferenceEndpoint", + "InferenceEndpointError", + "InferenceEndpointStatus", + "InferenceEndpointTimeoutError", + "InferenceEndpointType", + ], + "_jobs_api": [ + "JobInfo", + "JobOwner", + "JobStage", + "JobStatus", + ], + "_login": [ + "auth_list", + "auth_switch", + "interpreter_login", + "login", + "logout", + "notebook_login", + ], + "_oauth": [ + "OAuthInfo", + "OAuthOrgInfo", + "OAuthUserInfo", + "attach_huggingface_oauth", + "parse_huggingface_oauth", + ], + "_snapshot_download": [ + "snapshot_download", + ], + "_space_api": [ + "SpaceHardware", + "SpaceRuntime", + "SpaceStage", + "SpaceStorage", + "SpaceVariable", + ], + "_tensorboard_logger": [ + "HFSummaryWriter", + ], + "_webhooks_payload": [ + "WebhookPayload", + "WebhookPayloadComment", + "WebhookPayloadDiscussion", + "WebhookPayloadDiscussionChanges", + "WebhookPayloadEvent", + "WebhookPayloadMovedTo", + "WebhookPayloadRepo", + "WebhookPayloadUrl", + "WebhookPayloadWebhook", + ], + "_webhooks_server": [ + "WebhooksServer", + "webhook_endpoint", + ], + "community": [ + "Discussion", + "DiscussionComment", + "DiscussionCommit", + "DiscussionEvent", + "DiscussionStatusChange", + "DiscussionTitleChange", + "DiscussionWithDetails", + ], + "constants": [ + "CONFIG_NAME", + "FLAX_WEIGHTS_NAME", + "HUGGINGFACE_CO_URL_HOME", + "HUGGINGFACE_CO_URL_TEMPLATE", + "PYTORCH_WEIGHTS_NAME", + "REPO_TYPE_DATASET", + "REPO_TYPE_MODEL", + "REPO_TYPE_SPACE", + "TF2_WEIGHTS_NAME", + "TF_WEIGHTS_NAME", + ], + "fastai_utils": [ + "_save_pretrained_fastai", + "from_pretrained_fastai", + "push_to_hub_fastai", + ], + "file_download": [ + "HfFileMetadata", + "_CACHED_NO_EXIST", + "get_hf_file_metadata", + "hf_hub_download", + "hf_hub_url", + "try_to_load_from_cache", + ], + "hf_api": [ + "Collection", + "CollectionItem", + "CommitInfo", + "CommitOperation", + "CommitOperationAdd", + "CommitOperationCopy", + "CommitOperationDelete", + "DatasetInfo", + "GitCommitInfo", + "GitRefInfo", + "GitRefs", + "HfApi", + "ModelInfo", + "Organization", + "RepoUrl", + "SpaceInfo", + "User", + "UserLikes", + "WebhookInfo", + "WebhookWatchedItem", + "accept_access_request", + "add_collection_item", + "add_space_secret", + "add_space_variable", + "auth_check", + "cancel_access_request", + "cancel_job", + "change_discussion_status", + "comment_discussion", + "create_branch", + "create_collection", + "create_commit", + "create_discussion", + "create_inference_endpoint", + "create_inference_endpoint_from_catalog", + "create_pull_request", + "create_repo", + "create_scheduled_job", + "create_scheduled_uv_job", + "create_tag", + "create_webhook", + "dataset_info", + "delete_branch", + "delete_collection", + "delete_collection_item", + "delete_file", + "delete_folder", + "delete_inference_endpoint", + "delete_repo", + "delete_scheduled_job", + "delete_space_secret", + "delete_space_storage", + "delete_space_variable", + "delete_tag", + "delete_webhook", + "disable_webhook", + 
"duplicate_space", + "edit_discussion_comment", + "enable_webhook", + "fetch_job_logs", + "file_exists", + "get_collection", + "get_dataset_tags", + "get_discussion_details", + "get_full_repo_name", + "get_inference_endpoint", + "get_model_tags", + "get_organization_overview", + "get_paths_info", + "get_repo_discussions", + "get_safetensors_metadata", + "get_space_runtime", + "get_space_variables", + "get_token_permission", + "get_user_overview", + "get_webhook", + "grant_access", + "inspect_job", + "inspect_scheduled_job", + "list_accepted_access_requests", + "list_collections", + "list_datasets", + "list_inference_catalog", + "list_inference_endpoints", + "list_jobs", + "list_lfs_files", + "list_liked_repos", + "list_models", + "list_organization_members", + "list_papers", + "list_pending_access_requests", + "list_rejected_access_requests", + "list_repo_commits", + "list_repo_files", + "list_repo_likers", + "list_repo_refs", + "list_repo_tree", + "list_spaces", + "list_user_followers", + "list_user_following", + "list_webhooks", + "merge_pull_request", + "model_info", + "move_repo", + "paper_info", + "parse_safetensors_file_metadata", + "pause_inference_endpoint", + "pause_space", + "permanently_delete_lfs_files", + "preupload_lfs_files", + "reject_access_request", + "rename_discussion", + "repo_exists", + "repo_info", + "repo_type_and_id_from_hf_id", + "request_space_hardware", + "request_space_storage", + "restart_space", + "resume_inference_endpoint", + "resume_scheduled_job", + "revision_exists", + "run_as_future", + "run_job", + "run_uv_job", + "scale_to_zero_inference_endpoint", + "set_space_sleep_time", + "space_info", + "super_squash_history", + "suspend_scheduled_job", + "unlike", + "update_collection_item", + "update_collection_metadata", + "update_inference_endpoint", + "update_repo_settings", + "update_repo_visibility", + "update_webhook", + "upload_file", + "upload_folder", + "upload_large_folder", + "whoami", + ], + "hf_file_system": [ + "HfFileSystem", + "HfFileSystemFile", + "HfFileSystemResolvedPath", + "HfFileSystemStreamFile", + ], + "hub_mixin": [ + "ModelHubMixin", + "PyTorchModelHubMixin", + ], + "inference._client": [ + "InferenceClient", + "InferenceTimeoutError", + ], + "inference._generated._async_client": [ + "AsyncInferenceClient", + ], + "inference._generated.types": [ + "AudioClassificationInput", + "AudioClassificationOutputElement", + "AudioClassificationOutputTransform", + "AudioClassificationParameters", + "AudioToAudioInput", + "AudioToAudioOutputElement", + "AutomaticSpeechRecognitionEarlyStoppingEnum", + "AutomaticSpeechRecognitionGenerationParameters", + "AutomaticSpeechRecognitionInput", + "AutomaticSpeechRecognitionOutput", + "AutomaticSpeechRecognitionOutputChunk", + "AutomaticSpeechRecognitionParameters", + "ChatCompletionInput", + "ChatCompletionInputFunctionDefinition", + "ChatCompletionInputFunctionName", + "ChatCompletionInputGrammarType", + "ChatCompletionInputJSONSchema", + "ChatCompletionInputMessage", + "ChatCompletionInputMessageChunk", + "ChatCompletionInputMessageChunkType", + "ChatCompletionInputResponseFormatJSONObject", + "ChatCompletionInputResponseFormatJSONSchema", + "ChatCompletionInputResponseFormatText", + "ChatCompletionInputStreamOptions", + "ChatCompletionInputTool", + "ChatCompletionInputToolCall", + "ChatCompletionInputToolChoiceClass", + "ChatCompletionInputToolChoiceEnum", + "ChatCompletionInputURL", + "ChatCompletionOutput", + "ChatCompletionOutputComplete", + "ChatCompletionOutputFunctionDefinition", + 
"ChatCompletionOutputLogprob", + "ChatCompletionOutputLogprobs", + "ChatCompletionOutputMessage", + "ChatCompletionOutputToolCall", + "ChatCompletionOutputTopLogprob", + "ChatCompletionOutputUsage", + "ChatCompletionStreamOutput", + "ChatCompletionStreamOutputChoice", + "ChatCompletionStreamOutputDelta", + "ChatCompletionStreamOutputDeltaToolCall", + "ChatCompletionStreamOutputFunction", + "ChatCompletionStreamOutputLogprob", + "ChatCompletionStreamOutputLogprobs", + "ChatCompletionStreamOutputTopLogprob", + "ChatCompletionStreamOutputUsage", + "DepthEstimationInput", + "DepthEstimationOutput", + "DocumentQuestionAnsweringInput", + "DocumentQuestionAnsweringInputData", + "DocumentQuestionAnsweringOutputElement", + "DocumentQuestionAnsweringParameters", + "FeatureExtractionInput", + "FeatureExtractionInputTruncationDirection", + "FillMaskInput", + "FillMaskOutputElement", + "FillMaskParameters", + "ImageClassificationInput", + "ImageClassificationOutputElement", + "ImageClassificationOutputTransform", + "ImageClassificationParameters", + "ImageSegmentationInput", + "ImageSegmentationOutputElement", + "ImageSegmentationParameters", + "ImageSegmentationSubtask", + "ImageToImageInput", + "ImageToImageOutput", + "ImageToImageParameters", + "ImageToImageTargetSize", + "ImageToTextEarlyStoppingEnum", + "ImageToTextGenerationParameters", + "ImageToTextInput", + "ImageToTextOutput", + "ImageToTextParameters", + "ImageToVideoInput", + "ImageToVideoOutput", + "ImageToVideoParameters", + "ImageToVideoTargetSize", + "ObjectDetectionBoundingBox", + "ObjectDetectionInput", + "ObjectDetectionOutputElement", + "ObjectDetectionParameters", + "Padding", + "QuestionAnsweringInput", + "QuestionAnsweringInputData", + "QuestionAnsweringOutputElement", + "QuestionAnsweringParameters", + "SentenceSimilarityInput", + "SentenceSimilarityInputData", + "SummarizationInput", + "SummarizationOutput", + "SummarizationParameters", + "SummarizationTruncationStrategy", + "TableQuestionAnsweringInput", + "TableQuestionAnsweringInputData", + "TableQuestionAnsweringOutputElement", + "TableQuestionAnsweringParameters", + "Text2TextGenerationInput", + "Text2TextGenerationOutput", + "Text2TextGenerationParameters", + "Text2TextGenerationTruncationStrategy", + "TextClassificationInput", + "TextClassificationOutputElement", + "TextClassificationOutputTransform", + "TextClassificationParameters", + "TextGenerationInput", + "TextGenerationInputGenerateParameters", + "TextGenerationInputGrammarType", + "TextGenerationOutput", + "TextGenerationOutputBestOfSequence", + "TextGenerationOutputDetails", + "TextGenerationOutputFinishReason", + "TextGenerationOutputPrefillToken", + "TextGenerationOutputToken", + "TextGenerationStreamOutput", + "TextGenerationStreamOutputStreamDetails", + "TextGenerationStreamOutputToken", + "TextToAudioEarlyStoppingEnum", + "TextToAudioGenerationParameters", + "TextToAudioInput", + "TextToAudioOutput", + "TextToAudioParameters", + "TextToImageInput", + "TextToImageOutput", + "TextToImageParameters", + "TextToSpeechEarlyStoppingEnum", + "TextToSpeechGenerationParameters", + "TextToSpeechInput", + "TextToSpeechOutput", + "TextToSpeechParameters", + "TextToVideoInput", + "TextToVideoOutput", + "TextToVideoParameters", + "TokenClassificationAggregationStrategy", + "TokenClassificationInput", + "TokenClassificationOutputElement", + "TokenClassificationParameters", + "TranslationInput", + "TranslationOutput", + "TranslationParameters", + "TranslationTruncationStrategy", + "TypeEnum", + "VideoClassificationInput", + 
"VideoClassificationOutputElement", + "VideoClassificationOutputTransform", + "VideoClassificationParameters", + "VisualQuestionAnsweringInput", + "VisualQuestionAnsweringInputData", + "VisualQuestionAnsweringOutputElement", + "VisualQuestionAnsweringParameters", + "ZeroShotClassificationInput", + "ZeroShotClassificationOutputElement", + "ZeroShotClassificationParameters", + "ZeroShotImageClassificationInput", + "ZeroShotImageClassificationOutputElement", + "ZeroShotImageClassificationParameters", + "ZeroShotObjectDetectionBoundingBox", + "ZeroShotObjectDetectionInput", + "ZeroShotObjectDetectionOutputElement", + "ZeroShotObjectDetectionParameters", + ], + "inference._mcp.agent": [ + "Agent", + ], + "inference._mcp.mcp_client": [ + "MCPClient", + ], + "inference_api": [ + "InferenceApi", + ], + "keras_mixin": [ + "KerasModelHubMixin", + "from_pretrained_keras", + "push_to_hub_keras", + "save_pretrained_keras", + ], + "repocard": [ + "DatasetCard", + "ModelCard", + "RepoCard", + "SpaceCard", + "metadata_eval_result", + "metadata_load", + "metadata_save", + "metadata_update", + ], + "repocard_data": [ + "CardData", + "DatasetCardData", + "EvalResult", + "ModelCardData", + "SpaceCardData", + ], + "repository": [ + "Repository", + ], + "serialization": [ + "StateDictSplit", + "get_tf_storage_size", + "get_torch_storage_id", + "get_torch_storage_size", + "load_state_dict_from_file", + "load_torch_model", + "save_torch_model", + "save_torch_state_dict", + "split_state_dict_into_shards_factory", + "split_tf_state_dict_into_shards", + "split_torch_state_dict_into_shards", + ], + "serialization._dduf": [ + "DDUFEntry", + "export_entries_as_dduf", + "export_folder_as_dduf", + "read_dduf_file", + ], + "utils": [ + "CacheNotFound", + "CachedFileInfo", + "CachedRepoInfo", + "CachedRevisionInfo", + "CorruptedCacheException", + "DeleteCacheStrategy", + "HFCacheInfo", + "HfFolder", + "cached_assets_path", + "configure_http_backend", + "dump_environment_info", + "get_session", + "get_token", + "logging", + "scan_cache_dir", + ], +} + +# WARNING: __all__ is generated automatically, Any manual edit will be lost when re-generating this file ! +# +# To update the static imports, please run the following command and commit the changes. 
+# ``` +# # Use script +# python utils/check_all_variable.py --update +# +# # Or run style on codebase +# make style +# ``` + +__all__ = [ + "Agent", + "AsyncInferenceClient", + "AudioClassificationInput", + "AudioClassificationOutputElement", + "AudioClassificationOutputTransform", + "AudioClassificationParameters", + "AudioToAudioInput", + "AudioToAudioOutputElement", + "AutomaticSpeechRecognitionEarlyStoppingEnum", + "AutomaticSpeechRecognitionGenerationParameters", + "AutomaticSpeechRecognitionInput", + "AutomaticSpeechRecognitionOutput", + "AutomaticSpeechRecognitionOutputChunk", + "AutomaticSpeechRecognitionParameters", + "CONFIG_NAME", + "CacheNotFound", + "CachedFileInfo", + "CachedRepoInfo", + "CachedRevisionInfo", + "CardData", + "ChatCompletionInput", + "ChatCompletionInputFunctionDefinition", + "ChatCompletionInputFunctionName", + "ChatCompletionInputGrammarType", + "ChatCompletionInputJSONSchema", + "ChatCompletionInputMessage", + "ChatCompletionInputMessageChunk", + "ChatCompletionInputMessageChunkType", + "ChatCompletionInputResponseFormatJSONObject", + "ChatCompletionInputResponseFormatJSONSchema", + "ChatCompletionInputResponseFormatText", + "ChatCompletionInputStreamOptions", + "ChatCompletionInputTool", + "ChatCompletionInputToolCall", + "ChatCompletionInputToolChoiceClass", + "ChatCompletionInputToolChoiceEnum", + "ChatCompletionInputURL", + "ChatCompletionOutput", + "ChatCompletionOutputComplete", + "ChatCompletionOutputFunctionDefinition", + "ChatCompletionOutputLogprob", + "ChatCompletionOutputLogprobs", + "ChatCompletionOutputMessage", + "ChatCompletionOutputToolCall", + "ChatCompletionOutputTopLogprob", + "ChatCompletionOutputUsage", + "ChatCompletionStreamOutput", + "ChatCompletionStreamOutputChoice", + "ChatCompletionStreamOutputDelta", + "ChatCompletionStreamOutputDeltaToolCall", + "ChatCompletionStreamOutputFunction", + "ChatCompletionStreamOutputLogprob", + "ChatCompletionStreamOutputLogprobs", + "ChatCompletionStreamOutputTopLogprob", + "ChatCompletionStreamOutputUsage", + "Collection", + "CollectionItem", + "CommitInfo", + "CommitOperation", + "CommitOperationAdd", + "CommitOperationCopy", + "CommitOperationDelete", + "CommitScheduler", + "CorruptedCacheException", + "DDUFEntry", + "DatasetCard", + "DatasetCardData", + "DatasetInfo", + "DeleteCacheStrategy", + "DepthEstimationInput", + "DepthEstimationOutput", + "Discussion", + "DiscussionComment", + "DiscussionCommit", + "DiscussionEvent", + "DiscussionStatusChange", + "DiscussionTitleChange", + "DiscussionWithDetails", + "DocumentQuestionAnsweringInput", + "DocumentQuestionAnsweringInputData", + "DocumentQuestionAnsweringOutputElement", + "DocumentQuestionAnsweringParameters", + "EvalResult", + "FLAX_WEIGHTS_NAME", + "FeatureExtractionInput", + "FeatureExtractionInputTruncationDirection", + "FillMaskInput", + "FillMaskOutputElement", + "FillMaskParameters", + "GitCommitInfo", + "GitRefInfo", + "GitRefs", + "HFCacheInfo", + "HFSummaryWriter", + "HUGGINGFACE_CO_URL_HOME", + "HUGGINGFACE_CO_URL_TEMPLATE", + "HfApi", + "HfFileMetadata", + "HfFileSystem", + "HfFileSystemFile", + "HfFileSystemResolvedPath", + "HfFileSystemStreamFile", + "HfFolder", + "ImageClassificationInput", + "ImageClassificationOutputElement", + "ImageClassificationOutputTransform", + "ImageClassificationParameters", + "ImageSegmentationInput", + "ImageSegmentationOutputElement", + "ImageSegmentationParameters", + "ImageSegmentationSubtask", + "ImageToImageInput", + "ImageToImageOutput", + "ImageToImageParameters", + 
"ImageToImageTargetSize", + "ImageToTextEarlyStoppingEnum", + "ImageToTextGenerationParameters", + "ImageToTextInput", + "ImageToTextOutput", + "ImageToTextParameters", + "ImageToVideoInput", + "ImageToVideoOutput", + "ImageToVideoParameters", + "ImageToVideoTargetSize", + "InferenceApi", + "InferenceClient", + "InferenceEndpoint", + "InferenceEndpointError", + "InferenceEndpointStatus", + "InferenceEndpointTimeoutError", + "InferenceEndpointType", + "InferenceTimeoutError", + "JobInfo", + "JobOwner", + "JobStage", + "JobStatus", + "KerasModelHubMixin", + "MCPClient", + "ModelCard", + "ModelCardData", + "ModelHubMixin", + "ModelInfo", + "OAuthInfo", + "OAuthOrgInfo", + "OAuthUserInfo", + "ObjectDetectionBoundingBox", + "ObjectDetectionInput", + "ObjectDetectionOutputElement", + "ObjectDetectionParameters", + "Organization", + "PYTORCH_WEIGHTS_NAME", + "Padding", + "PyTorchModelHubMixin", + "QuestionAnsweringInput", + "QuestionAnsweringInputData", + "QuestionAnsweringOutputElement", + "QuestionAnsweringParameters", + "REPO_TYPE_DATASET", + "REPO_TYPE_MODEL", + "REPO_TYPE_SPACE", + "RepoCard", + "RepoUrl", + "Repository", + "SentenceSimilarityInput", + "SentenceSimilarityInputData", + "SpaceCard", + "SpaceCardData", + "SpaceHardware", + "SpaceInfo", + "SpaceRuntime", + "SpaceStage", + "SpaceStorage", + "SpaceVariable", + "StateDictSplit", + "SummarizationInput", + "SummarizationOutput", + "SummarizationParameters", + "SummarizationTruncationStrategy", + "TF2_WEIGHTS_NAME", + "TF_WEIGHTS_NAME", + "TableQuestionAnsweringInput", + "TableQuestionAnsweringInputData", + "TableQuestionAnsweringOutputElement", + "TableQuestionAnsweringParameters", + "Text2TextGenerationInput", + "Text2TextGenerationOutput", + "Text2TextGenerationParameters", + "Text2TextGenerationTruncationStrategy", + "TextClassificationInput", + "TextClassificationOutputElement", + "TextClassificationOutputTransform", + "TextClassificationParameters", + "TextGenerationInput", + "TextGenerationInputGenerateParameters", + "TextGenerationInputGrammarType", + "TextGenerationOutput", + "TextGenerationOutputBestOfSequence", + "TextGenerationOutputDetails", + "TextGenerationOutputFinishReason", + "TextGenerationOutputPrefillToken", + "TextGenerationOutputToken", + "TextGenerationStreamOutput", + "TextGenerationStreamOutputStreamDetails", + "TextGenerationStreamOutputToken", + "TextToAudioEarlyStoppingEnum", + "TextToAudioGenerationParameters", + "TextToAudioInput", + "TextToAudioOutput", + "TextToAudioParameters", + "TextToImageInput", + "TextToImageOutput", + "TextToImageParameters", + "TextToSpeechEarlyStoppingEnum", + "TextToSpeechGenerationParameters", + "TextToSpeechInput", + "TextToSpeechOutput", + "TextToSpeechParameters", + "TextToVideoInput", + "TextToVideoOutput", + "TextToVideoParameters", + "TokenClassificationAggregationStrategy", + "TokenClassificationInput", + "TokenClassificationOutputElement", + "TokenClassificationParameters", + "TranslationInput", + "TranslationOutput", + "TranslationParameters", + "TranslationTruncationStrategy", + "TypeEnum", + "User", + "UserLikes", + "VideoClassificationInput", + "VideoClassificationOutputElement", + "VideoClassificationOutputTransform", + "VideoClassificationParameters", + "VisualQuestionAnsweringInput", + "VisualQuestionAnsweringInputData", + "VisualQuestionAnsweringOutputElement", + "VisualQuestionAnsweringParameters", + "WebhookInfo", + "WebhookPayload", + "WebhookPayloadComment", + "WebhookPayloadDiscussion", + "WebhookPayloadDiscussionChanges", + "WebhookPayloadEvent", + 
"WebhookPayloadMovedTo", + "WebhookPayloadRepo", + "WebhookPayloadUrl", + "WebhookPayloadWebhook", + "WebhookWatchedItem", + "WebhooksServer", + "ZeroShotClassificationInput", + "ZeroShotClassificationOutputElement", + "ZeroShotClassificationParameters", + "ZeroShotImageClassificationInput", + "ZeroShotImageClassificationOutputElement", + "ZeroShotImageClassificationParameters", + "ZeroShotObjectDetectionBoundingBox", + "ZeroShotObjectDetectionInput", + "ZeroShotObjectDetectionOutputElement", + "ZeroShotObjectDetectionParameters", + "_CACHED_NO_EXIST", + "_save_pretrained_fastai", + "accept_access_request", + "add_collection_item", + "add_space_secret", + "add_space_variable", + "attach_huggingface_oauth", + "auth_check", + "auth_list", + "auth_switch", + "cached_assets_path", + "cancel_access_request", + "cancel_job", + "change_discussion_status", + "comment_discussion", + "configure_http_backend", + "create_branch", + "create_collection", + "create_commit", + "create_discussion", + "create_inference_endpoint", + "create_inference_endpoint_from_catalog", + "create_pull_request", + "create_repo", + "create_scheduled_job", + "create_scheduled_uv_job", + "create_tag", + "create_webhook", + "dataset_info", + "delete_branch", + "delete_collection", + "delete_collection_item", + "delete_file", + "delete_folder", + "delete_inference_endpoint", + "delete_repo", + "delete_scheduled_job", + "delete_space_secret", + "delete_space_storage", + "delete_space_variable", + "delete_tag", + "delete_webhook", + "disable_webhook", + "dump_environment_info", + "duplicate_space", + "edit_discussion_comment", + "enable_webhook", + "export_entries_as_dduf", + "export_folder_as_dduf", + "fetch_job_logs", + "file_exists", + "from_pretrained_fastai", + "from_pretrained_keras", + "get_collection", + "get_dataset_tags", + "get_discussion_details", + "get_full_repo_name", + "get_hf_file_metadata", + "get_inference_endpoint", + "get_model_tags", + "get_organization_overview", + "get_paths_info", + "get_repo_discussions", + "get_safetensors_metadata", + "get_session", + "get_space_runtime", + "get_space_variables", + "get_tf_storage_size", + "get_token", + "get_token_permission", + "get_torch_storage_id", + "get_torch_storage_size", + "get_user_overview", + "get_webhook", + "grant_access", + "hf_hub_download", + "hf_hub_url", + "inspect_job", + "inspect_scheduled_job", + "interpreter_login", + "list_accepted_access_requests", + "list_collections", + "list_datasets", + "list_inference_catalog", + "list_inference_endpoints", + "list_jobs", + "list_lfs_files", + "list_liked_repos", + "list_models", + "list_organization_members", + "list_papers", + "list_pending_access_requests", + "list_rejected_access_requests", + "list_repo_commits", + "list_repo_files", + "list_repo_likers", + "list_repo_refs", + "list_repo_tree", + "list_spaces", + "list_user_followers", + "list_user_following", + "list_webhooks", + "load_state_dict_from_file", + "load_torch_model", + "logging", + "login", + "logout", + "merge_pull_request", + "metadata_eval_result", + "metadata_load", + "metadata_save", + "metadata_update", + "model_info", + "move_repo", + "notebook_login", + "paper_info", + "parse_huggingface_oauth", + "parse_safetensors_file_metadata", + "pause_inference_endpoint", + "pause_space", + "permanently_delete_lfs_files", + "preupload_lfs_files", + "push_to_hub_fastai", + "push_to_hub_keras", + "read_dduf_file", + "reject_access_request", + "rename_discussion", + "repo_exists", + "repo_info", + "repo_type_and_id_from_hf_id", + 
"request_space_hardware", + "request_space_storage", + "restart_space", + "resume_inference_endpoint", + "resume_scheduled_job", + "revision_exists", + "run_as_future", + "run_job", + "run_uv_job", + "save_pretrained_keras", + "save_torch_model", + "save_torch_state_dict", + "scale_to_zero_inference_endpoint", + "scan_cache_dir", + "set_space_sleep_time", + "snapshot_download", + "space_info", + "split_state_dict_into_shards_factory", + "split_tf_state_dict_into_shards", + "split_torch_state_dict_into_shards", + "super_squash_history", + "suspend_scheduled_job", + "try_to_load_from_cache", + "unlike", + "update_collection_item", + "update_collection_metadata", + "update_inference_endpoint", + "update_repo_settings", + "update_repo_visibility", + "update_webhook", + "upload_file", + "upload_folder", + "upload_large_folder", + "webhook_endpoint", + "whoami", +] + + +def _attach(package_name, submodules=None, submod_attrs=None): + """Attach lazily loaded submodules, functions, or other attributes. + + Typically, modules import submodules and attributes as follows: + + ```py + import mysubmodule + import anothersubmodule + + from .foo import someattr + ``` + + The idea is to replace a package's `__getattr__`, `__dir__`, such that all imports + work exactly the way they would with normal imports, except that the import occurs + upon first use. + + The typical way to call this function, replacing the above imports, is: + + ```python + __getattr__, __dir__ = lazy.attach( + __name__, + ['mysubmodule', 'anothersubmodule'], + {'foo': ['someattr']} + ) + ``` + This functionality requires Python 3.7 or higher. + + Args: + package_name (`str`): + Typically use `__name__`. + submodules (`set`): + List of submodules to attach. + submod_attrs (`dict`): + Dictionary of submodule -> list of attributes / functions. + These attributes are imported as they are used. + + Returns: + __getattr__, __dir__, __all__ + + """ + if submod_attrs is None: + submod_attrs = {} + + if submodules is None: + submodules = set() + else: + submodules = set(submodules) + + attr_to_modules = {attr: mod for mod, attrs in submod_attrs.items() for attr in attrs} + + def __getattr__(name): + if name in submodules: + try: + return importlib.import_module(f"{package_name}.{name}") + except Exception as e: + print(f"Error importing {package_name}.{name}: {e}") + raise + elif name in attr_to_modules: + submod_path = f"{package_name}.{attr_to_modules[name]}" + try: + submod = importlib.import_module(submod_path) + except Exception as e: + print(f"Error importing {submod_path}: {e}") + raise + attr = getattr(submod, name) + + # If the attribute lives in a file (module) with the same + # name as the attribute, ensure that the attribute and *not* + # the module is accessible on the package. + if name == attr_to_modules[name]: + pkg = sys.modules[package_name] + pkg.__dict__[name] = attr + + return attr + else: + raise AttributeError(f"No {package_name} attribute {name}") + + def __dir__(): + return __all__ + + return __getattr__, __dir__ + + +__getattr__, __dir__ = _attach(__name__, submodules=[], submod_attrs=_SUBMOD_ATTRS) + +if os.environ.get("EAGER_IMPORT", ""): + for attr in __all__: + __getattr__(attr) + +# WARNING: any content below this statement is generated automatically. Any manual edit +# will be lost when re-generating this file ! +# +# To update the static imports, please run the following command and commit the changes. 
+# ``` +# # Use script +# python utils/check_static_imports.py --update +# +# # Or run style on codebase +# make style +# ``` +if TYPE_CHECKING: # pragma: no cover + from ._commit_scheduler import CommitScheduler # noqa: F401 + from ._inference_endpoints import ( + InferenceEndpoint, # noqa: F401 + InferenceEndpointError, # noqa: F401 + InferenceEndpointStatus, # noqa: F401 + InferenceEndpointTimeoutError, # noqa: F401 + InferenceEndpointType, # noqa: F401 + ) + from ._jobs_api import ( + JobInfo, # noqa: F401 + JobOwner, # noqa: F401 + JobStage, # noqa: F401 + JobStatus, # noqa: F401 + ) + from ._login import ( + auth_list, # noqa: F401 + auth_switch, # noqa: F401 + interpreter_login, # noqa: F401 + login, # noqa: F401 + logout, # noqa: F401 + notebook_login, # noqa: F401 + ) + from ._oauth import ( + OAuthInfo, # noqa: F401 + OAuthOrgInfo, # noqa: F401 + OAuthUserInfo, # noqa: F401 + attach_huggingface_oauth, # noqa: F401 + parse_huggingface_oauth, # noqa: F401 + ) + from ._snapshot_download import snapshot_download # noqa: F401 + from ._space_api import ( + SpaceHardware, # noqa: F401 + SpaceRuntime, # noqa: F401 + SpaceStage, # noqa: F401 + SpaceStorage, # noqa: F401 + SpaceVariable, # noqa: F401 + ) + from ._tensorboard_logger import HFSummaryWriter # noqa: F401 + from ._webhooks_payload import ( + WebhookPayload, # noqa: F401 + WebhookPayloadComment, # noqa: F401 + WebhookPayloadDiscussion, # noqa: F401 + WebhookPayloadDiscussionChanges, # noqa: F401 + WebhookPayloadEvent, # noqa: F401 + WebhookPayloadMovedTo, # noqa: F401 + WebhookPayloadRepo, # noqa: F401 + WebhookPayloadUrl, # noqa: F401 + WebhookPayloadWebhook, # noqa: F401 + ) + from ._webhooks_server import ( + WebhooksServer, # noqa: F401 + webhook_endpoint, # noqa: F401 + ) + from .community import ( + Discussion, # noqa: F401 + DiscussionComment, # noqa: F401 + DiscussionCommit, # noqa: F401 + DiscussionEvent, # noqa: F401 + DiscussionStatusChange, # noqa: F401 + DiscussionTitleChange, # noqa: F401 + DiscussionWithDetails, # noqa: F401 + ) + from .constants import ( + CONFIG_NAME, # noqa: F401 + FLAX_WEIGHTS_NAME, # noqa: F401 + HUGGINGFACE_CO_URL_HOME, # noqa: F401 + HUGGINGFACE_CO_URL_TEMPLATE, # noqa: F401 + PYTORCH_WEIGHTS_NAME, # noqa: F401 + REPO_TYPE_DATASET, # noqa: F401 + REPO_TYPE_MODEL, # noqa: F401 + REPO_TYPE_SPACE, # noqa: F401 + TF2_WEIGHTS_NAME, # noqa: F401 + TF_WEIGHTS_NAME, # noqa: F401 + ) + from .fastai_utils import ( + _save_pretrained_fastai, # noqa: F401 + from_pretrained_fastai, # noqa: F401 + push_to_hub_fastai, # noqa: F401 + ) + from .file_download import ( + _CACHED_NO_EXIST, # noqa: F401 + HfFileMetadata, # noqa: F401 + get_hf_file_metadata, # noqa: F401 + hf_hub_download, # noqa: F401 + hf_hub_url, # noqa: F401 + try_to_load_from_cache, # noqa: F401 + ) + from .hf_api import ( + Collection, # noqa: F401 + CollectionItem, # noqa: F401 + CommitInfo, # noqa: F401 + CommitOperation, # noqa: F401 + CommitOperationAdd, # noqa: F401 + CommitOperationCopy, # noqa: F401 + CommitOperationDelete, # noqa: F401 + DatasetInfo, # noqa: F401 + GitCommitInfo, # noqa: F401 + GitRefInfo, # noqa: F401 + GitRefs, # noqa: F401 + HfApi, # noqa: F401 + ModelInfo, # noqa: F401 + Organization, # noqa: F401 + RepoUrl, # noqa: F401 + SpaceInfo, # noqa: F401 + User, # noqa: F401 + UserLikes, # noqa: F401 + WebhookInfo, # noqa: F401 + WebhookWatchedItem, # noqa: F401 + accept_access_request, # noqa: F401 + add_collection_item, # noqa: F401 + add_space_secret, # noqa: F401 + add_space_variable, # noqa: F401 + auth_check, # 
noqa: F401 + cancel_access_request, # noqa: F401 + cancel_job, # noqa: F401 + change_discussion_status, # noqa: F401 + comment_discussion, # noqa: F401 + create_branch, # noqa: F401 + create_collection, # noqa: F401 + create_commit, # noqa: F401 + create_discussion, # noqa: F401 + create_inference_endpoint, # noqa: F401 + create_inference_endpoint_from_catalog, # noqa: F401 + create_pull_request, # noqa: F401 + create_repo, # noqa: F401 + create_scheduled_job, # noqa: F401 + create_scheduled_uv_job, # noqa: F401 + create_tag, # noqa: F401 + create_webhook, # noqa: F401 + dataset_info, # noqa: F401 + delete_branch, # noqa: F401 + delete_collection, # noqa: F401 + delete_collection_item, # noqa: F401 + delete_file, # noqa: F401 + delete_folder, # noqa: F401 + delete_inference_endpoint, # noqa: F401 + delete_repo, # noqa: F401 + delete_scheduled_job, # noqa: F401 + delete_space_secret, # noqa: F401 + delete_space_storage, # noqa: F401 + delete_space_variable, # noqa: F401 + delete_tag, # noqa: F401 + delete_webhook, # noqa: F401 + disable_webhook, # noqa: F401 + duplicate_space, # noqa: F401 + edit_discussion_comment, # noqa: F401 + enable_webhook, # noqa: F401 + fetch_job_logs, # noqa: F401 + file_exists, # noqa: F401 + get_collection, # noqa: F401 + get_dataset_tags, # noqa: F401 + get_discussion_details, # noqa: F401 + get_full_repo_name, # noqa: F401 + get_inference_endpoint, # noqa: F401 + get_model_tags, # noqa: F401 + get_organization_overview, # noqa: F401 + get_paths_info, # noqa: F401 + get_repo_discussions, # noqa: F401 + get_safetensors_metadata, # noqa: F401 + get_space_runtime, # noqa: F401 + get_space_variables, # noqa: F401 + get_token_permission, # noqa: F401 + get_user_overview, # noqa: F401 + get_webhook, # noqa: F401 + grant_access, # noqa: F401 + inspect_job, # noqa: F401 + inspect_scheduled_job, # noqa: F401 + list_accepted_access_requests, # noqa: F401 + list_collections, # noqa: F401 + list_datasets, # noqa: F401 + list_inference_catalog, # noqa: F401 + list_inference_endpoints, # noqa: F401 + list_jobs, # noqa: F401 + list_lfs_files, # noqa: F401 + list_liked_repos, # noqa: F401 + list_models, # noqa: F401 + list_organization_members, # noqa: F401 + list_papers, # noqa: F401 + list_pending_access_requests, # noqa: F401 + list_rejected_access_requests, # noqa: F401 + list_repo_commits, # noqa: F401 + list_repo_files, # noqa: F401 + list_repo_likers, # noqa: F401 + list_repo_refs, # noqa: F401 + list_repo_tree, # noqa: F401 + list_spaces, # noqa: F401 + list_user_followers, # noqa: F401 + list_user_following, # noqa: F401 + list_webhooks, # noqa: F401 + merge_pull_request, # noqa: F401 + model_info, # noqa: F401 + move_repo, # noqa: F401 + paper_info, # noqa: F401 + parse_safetensors_file_metadata, # noqa: F401 + pause_inference_endpoint, # noqa: F401 + pause_space, # noqa: F401 + permanently_delete_lfs_files, # noqa: F401 + preupload_lfs_files, # noqa: F401 + reject_access_request, # noqa: F401 + rename_discussion, # noqa: F401 + repo_exists, # noqa: F401 + repo_info, # noqa: F401 + repo_type_and_id_from_hf_id, # noqa: F401 + request_space_hardware, # noqa: F401 + request_space_storage, # noqa: F401 + restart_space, # noqa: F401 + resume_inference_endpoint, # noqa: F401 + resume_scheduled_job, # noqa: F401 + revision_exists, # noqa: F401 + run_as_future, # noqa: F401 + run_job, # noqa: F401 + run_uv_job, # noqa: F401 + scale_to_zero_inference_endpoint, # noqa: F401 + set_space_sleep_time, # noqa: F401 + space_info, # noqa: F401 + super_squash_history, # noqa: F401 + 
suspend_scheduled_job, # noqa: F401 + unlike, # noqa: F401 + update_collection_item, # noqa: F401 + update_collection_metadata, # noqa: F401 + update_inference_endpoint, # noqa: F401 + update_repo_settings, # noqa: F401 + update_repo_visibility, # noqa: F401 + update_webhook, # noqa: F401 + upload_file, # noqa: F401 + upload_folder, # noqa: F401 + upload_large_folder, # noqa: F401 + whoami, # noqa: F401 + ) + from .hf_file_system import ( + HfFileSystem, # noqa: F401 + HfFileSystemFile, # noqa: F401 + HfFileSystemResolvedPath, # noqa: F401 + HfFileSystemStreamFile, # noqa: F401 + ) + from .hub_mixin import ( + ModelHubMixin, # noqa: F401 + PyTorchModelHubMixin, # noqa: F401 + ) + from .inference._client import ( + InferenceClient, # noqa: F401 + InferenceTimeoutError, # noqa: F401 + ) + from .inference._generated._async_client import AsyncInferenceClient # noqa: F401 + from .inference._generated.types import ( + AudioClassificationInput, # noqa: F401 + AudioClassificationOutputElement, # noqa: F401 + AudioClassificationOutputTransform, # noqa: F401 + AudioClassificationParameters, # noqa: F401 + AudioToAudioInput, # noqa: F401 + AudioToAudioOutputElement, # noqa: F401 + AutomaticSpeechRecognitionEarlyStoppingEnum, # noqa: F401 + AutomaticSpeechRecognitionGenerationParameters, # noqa: F401 + AutomaticSpeechRecognitionInput, # noqa: F401 + AutomaticSpeechRecognitionOutput, # noqa: F401 + AutomaticSpeechRecognitionOutputChunk, # noqa: F401 + AutomaticSpeechRecognitionParameters, # noqa: F401 + ChatCompletionInput, # noqa: F401 + ChatCompletionInputFunctionDefinition, # noqa: F401 + ChatCompletionInputFunctionName, # noqa: F401 + ChatCompletionInputGrammarType, # noqa: F401 + ChatCompletionInputJSONSchema, # noqa: F401 + ChatCompletionInputMessage, # noqa: F401 + ChatCompletionInputMessageChunk, # noqa: F401 + ChatCompletionInputMessageChunkType, # noqa: F401 + ChatCompletionInputResponseFormatJSONObject, # noqa: F401 + ChatCompletionInputResponseFormatJSONSchema, # noqa: F401 + ChatCompletionInputResponseFormatText, # noqa: F401 + ChatCompletionInputStreamOptions, # noqa: F401 + ChatCompletionInputTool, # noqa: F401 + ChatCompletionInputToolCall, # noqa: F401 + ChatCompletionInputToolChoiceClass, # noqa: F401 + ChatCompletionInputToolChoiceEnum, # noqa: F401 + ChatCompletionInputURL, # noqa: F401 + ChatCompletionOutput, # noqa: F401 + ChatCompletionOutputComplete, # noqa: F401 + ChatCompletionOutputFunctionDefinition, # noqa: F401 + ChatCompletionOutputLogprob, # noqa: F401 + ChatCompletionOutputLogprobs, # noqa: F401 + ChatCompletionOutputMessage, # noqa: F401 + ChatCompletionOutputToolCall, # noqa: F401 + ChatCompletionOutputTopLogprob, # noqa: F401 + ChatCompletionOutputUsage, # noqa: F401 + ChatCompletionStreamOutput, # noqa: F401 + ChatCompletionStreamOutputChoice, # noqa: F401 + ChatCompletionStreamOutputDelta, # noqa: F401 + ChatCompletionStreamOutputDeltaToolCall, # noqa: F401 + ChatCompletionStreamOutputFunction, # noqa: F401 + ChatCompletionStreamOutputLogprob, # noqa: F401 + ChatCompletionStreamOutputLogprobs, # noqa: F401 + ChatCompletionStreamOutputTopLogprob, # noqa: F401 + ChatCompletionStreamOutputUsage, # noqa: F401 + DepthEstimationInput, # noqa: F401 + DepthEstimationOutput, # noqa: F401 + DocumentQuestionAnsweringInput, # noqa: F401 + DocumentQuestionAnsweringInputData, # noqa: F401 + DocumentQuestionAnsweringOutputElement, # noqa: F401 + DocumentQuestionAnsweringParameters, # noqa: F401 + FeatureExtractionInput, # noqa: F401 + FeatureExtractionInputTruncationDirection, # 
noqa: F401 + FillMaskInput, # noqa: F401 + FillMaskOutputElement, # noqa: F401 + FillMaskParameters, # noqa: F401 + ImageClassificationInput, # noqa: F401 + ImageClassificationOutputElement, # noqa: F401 + ImageClassificationOutputTransform, # noqa: F401 + ImageClassificationParameters, # noqa: F401 + ImageSegmentationInput, # noqa: F401 + ImageSegmentationOutputElement, # noqa: F401 + ImageSegmentationParameters, # noqa: F401 + ImageSegmentationSubtask, # noqa: F401 + ImageToImageInput, # noqa: F401 + ImageToImageOutput, # noqa: F401 + ImageToImageParameters, # noqa: F401 + ImageToImageTargetSize, # noqa: F401 + ImageToTextEarlyStoppingEnum, # noqa: F401 + ImageToTextGenerationParameters, # noqa: F401 + ImageToTextInput, # noqa: F401 + ImageToTextOutput, # noqa: F401 + ImageToTextParameters, # noqa: F401 + ImageToVideoInput, # noqa: F401 + ImageToVideoOutput, # noqa: F401 + ImageToVideoParameters, # noqa: F401 + ImageToVideoTargetSize, # noqa: F401 + ObjectDetectionBoundingBox, # noqa: F401 + ObjectDetectionInput, # noqa: F401 + ObjectDetectionOutputElement, # noqa: F401 + ObjectDetectionParameters, # noqa: F401 + Padding, # noqa: F401 + QuestionAnsweringInput, # noqa: F401 + QuestionAnsweringInputData, # noqa: F401 + QuestionAnsweringOutputElement, # noqa: F401 + QuestionAnsweringParameters, # noqa: F401 + SentenceSimilarityInput, # noqa: F401 + SentenceSimilarityInputData, # noqa: F401 + SummarizationInput, # noqa: F401 + SummarizationOutput, # noqa: F401 + SummarizationParameters, # noqa: F401 + SummarizationTruncationStrategy, # noqa: F401 + TableQuestionAnsweringInput, # noqa: F401 + TableQuestionAnsweringInputData, # noqa: F401 + TableQuestionAnsweringOutputElement, # noqa: F401 + TableQuestionAnsweringParameters, # noqa: F401 + Text2TextGenerationInput, # noqa: F401 + Text2TextGenerationOutput, # noqa: F401 + Text2TextGenerationParameters, # noqa: F401 + Text2TextGenerationTruncationStrategy, # noqa: F401 + TextClassificationInput, # noqa: F401 + TextClassificationOutputElement, # noqa: F401 + TextClassificationOutputTransform, # noqa: F401 + TextClassificationParameters, # noqa: F401 + TextGenerationInput, # noqa: F401 + TextGenerationInputGenerateParameters, # noqa: F401 + TextGenerationInputGrammarType, # noqa: F401 + TextGenerationOutput, # noqa: F401 + TextGenerationOutputBestOfSequence, # noqa: F401 + TextGenerationOutputDetails, # noqa: F401 + TextGenerationOutputFinishReason, # noqa: F401 + TextGenerationOutputPrefillToken, # noqa: F401 + TextGenerationOutputToken, # noqa: F401 + TextGenerationStreamOutput, # noqa: F401 + TextGenerationStreamOutputStreamDetails, # noqa: F401 + TextGenerationStreamOutputToken, # noqa: F401 + TextToAudioEarlyStoppingEnum, # noqa: F401 + TextToAudioGenerationParameters, # noqa: F401 + TextToAudioInput, # noqa: F401 + TextToAudioOutput, # noqa: F401 + TextToAudioParameters, # noqa: F401 + TextToImageInput, # noqa: F401 + TextToImageOutput, # noqa: F401 + TextToImageParameters, # noqa: F401 + TextToSpeechEarlyStoppingEnum, # noqa: F401 + TextToSpeechGenerationParameters, # noqa: F401 + TextToSpeechInput, # noqa: F401 + TextToSpeechOutput, # noqa: F401 + TextToSpeechParameters, # noqa: F401 + TextToVideoInput, # noqa: F401 + TextToVideoOutput, # noqa: F401 + TextToVideoParameters, # noqa: F401 + TokenClassificationAggregationStrategy, # noqa: F401 + TokenClassificationInput, # noqa: F401 + TokenClassificationOutputElement, # noqa: F401 + TokenClassificationParameters, # noqa: F401 + TranslationInput, # noqa: F401 + TranslationOutput, # noqa: F401 
+ TranslationParameters, # noqa: F401 + TranslationTruncationStrategy, # noqa: F401 + TypeEnum, # noqa: F401 + VideoClassificationInput, # noqa: F401 + VideoClassificationOutputElement, # noqa: F401 + VideoClassificationOutputTransform, # noqa: F401 + VideoClassificationParameters, # noqa: F401 + VisualQuestionAnsweringInput, # noqa: F401 + VisualQuestionAnsweringInputData, # noqa: F401 + VisualQuestionAnsweringOutputElement, # noqa: F401 + VisualQuestionAnsweringParameters, # noqa: F401 + ZeroShotClassificationInput, # noqa: F401 + ZeroShotClassificationOutputElement, # noqa: F401 + ZeroShotClassificationParameters, # noqa: F401 + ZeroShotImageClassificationInput, # noqa: F401 + ZeroShotImageClassificationOutputElement, # noqa: F401 + ZeroShotImageClassificationParameters, # noqa: F401 + ZeroShotObjectDetectionBoundingBox, # noqa: F401 + ZeroShotObjectDetectionInput, # noqa: F401 + ZeroShotObjectDetectionOutputElement, # noqa: F401 + ZeroShotObjectDetectionParameters, # noqa: F401 + ) + from .inference._mcp.agent import Agent # noqa: F401 + from .inference._mcp.mcp_client import MCPClient # noqa: F401 + from .inference_api import InferenceApi # noqa: F401 + from .keras_mixin import ( + KerasModelHubMixin, # noqa: F401 + from_pretrained_keras, # noqa: F401 + push_to_hub_keras, # noqa: F401 + save_pretrained_keras, # noqa: F401 + ) + from .repocard import ( + DatasetCard, # noqa: F401 + ModelCard, # noqa: F401 + RepoCard, # noqa: F401 + SpaceCard, # noqa: F401 + metadata_eval_result, # noqa: F401 + metadata_load, # noqa: F401 + metadata_save, # noqa: F401 + metadata_update, # noqa: F401 + ) + from .repocard_data import ( + CardData, # noqa: F401 + DatasetCardData, # noqa: F401 + EvalResult, # noqa: F401 + ModelCardData, # noqa: F401 + SpaceCardData, # noqa: F401 + ) + from .repository import Repository # noqa: F401 + from .serialization import ( + StateDictSplit, # noqa: F401 + get_tf_storage_size, # noqa: F401 + get_torch_storage_id, # noqa: F401 + get_torch_storage_size, # noqa: F401 + load_state_dict_from_file, # noqa: F401 + load_torch_model, # noqa: F401 + save_torch_model, # noqa: F401 + save_torch_state_dict, # noqa: F401 + split_state_dict_into_shards_factory, # noqa: F401 + split_tf_state_dict_into_shards, # noqa: F401 + split_torch_state_dict_into_shards, # noqa: F401 + ) + from .serialization._dduf import ( + DDUFEntry, # noqa: F401 + export_entries_as_dduf, # noqa: F401 + export_folder_as_dduf, # noqa: F401 + read_dduf_file, # noqa: F401 + ) + from .utils import ( + CachedFileInfo, # noqa: F401 + CachedRepoInfo, # noqa: F401 + CachedRevisionInfo, # noqa: F401 + CacheNotFound, # noqa: F401 + CorruptedCacheException, # noqa: F401 + DeleteCacheStrategy, # noqa: F401 + HFCacheInfo, # noqa: F401 + HfFolder, # noqa: F401 + cached_assets_path, # noqa: F401 + configure_http_backend, # noqa: F401 + dump_environment_info, # noqa: F401 + get_session, # noqa: F401 + get_token, # noqa: F401 + logging, # noqa: F401 + scan_cache_dir, # noqa: F401 + ) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_commit_api.py b/venv/lib/python3.13/site-packages/huggingface_hub/_commit_api.py new file mode 100644 index 0000000000000000000000000000000000000000..7ed64b0e5ed550c392f193239a2e00669cc3144a --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_commit_api.py @@ -0,0 +1,968 @@ +""" +Type definitions and utilities for the `create_commit` API +""" + +import base64 +import io +import os +import warnings +from collections import defaultdict +from contextlib import 
contextmanager
+from dataclasses import dataclass, field
+from itertools import groupby
+from pathlib import Path, PurePosixPath
+from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Literal, Optional, Tuple, Union
+
+from tqdm.contrib.concurrent import thread_map
+
+from . import constants
+from .errors import EntryNotFoundError, HfHubHTTPError, XetAuthorizationError, XetRefreshTokenError
+from .file_download import hf_hub_url
+from .lfs import UploadInfo, lfs_upload, post_lfs_batch_info
+from .utils import (
+    FORBIDDEN_FOLDERS,
+    XetTokenType,
+    are_progress_bars_disabled,
+    chunk_iterable,
+    fetch_xet_connection_info_from_repo_info,
+    get_session,
+    hf_raise_for_status,
+    logging,
+    sha,
+    tqdm_stream_file,
+    validate_hf_hub_args,
+)
+from .utils import tqdm as hf_tqdm
+from .utils._runtime import is_xet_available
+
+
+if TYPE_CHECKING:
+    from .hf_api import RepoFile
+
+
+logger = logging.get_logger(__name__)
+
+
+UploadMode = Literal["lfs", "regular"]
+
+# Max is 1,000 per request on the Hub for HfApi.get_paths_info
+# Otherwise we get:
+# HfHubHTTPError: 413 Client Error: Payload Too Large for url: https://huggingface.co/api/datasets/xxx (Request ID: xxx)\n\ntoo many parameters
+# See https://github.com/huggingface/huggingface_hub/issues/1503
+FETCH_LFS_BATCH_SIZE = 500
+
+UPLOAD_BATCH_MAX_NUM_FILES = 256
+
+
+@dataclass
+class CommitOperationDelete:
+    """
+    Data structure holding necessary info to delete a file or a folder from a repository
+    on the Hub.
+
+    Args:
+        path_in_repo (`str`):
+            Relative filepath in the repo, for example: `"checkpoints/1fec34a/weights.bin"`
+            for a file or `"checkpoints/1fec34a/"` for a folder.
+        is_folder (`bool` or `Literal["auto"]`, *optional*):
+            Whether the Delete Operation applies to a folder or not. If "auto", the path
+            type (file or folder) is guessed automatically by checking whether the path ends
+            with a "/" (folder) or not (file). To explicitly set the path type, you can set
+            `is_folder=True` or `is_folder=False`.
+    """
+
+    path_in_repo: str
+    is_folder: Union[bool, Literal["auto"]] = "auto"
+
+    def __post_init__(self):
+        self.path_in_repo = _validate_path_in_repo(self.path_in_repo)
+
+        if self.is_folder == "auto":
+            self.is_folder = self.path_in_repo.endswith("/")
+        if not isinstance(self.is_folder, bool):
+            raise ValueError(
+                f"Wrong value for `is_folder`. Must be one of [`True`, `False`, `'auto'`]. Got '{self.is_folder}'."
+            )
+
+
+@dataclass
+class CommitOperationCopy:
+    """
+    Data structure holding necessary info to copy a file in a repository on the Hub.
+
+    Limitations:
+      - Only LFS files can be copied. To copy a regular file, you need to download it locally and re-upload it
+      - Cross-repository copies are not supported.
+
+    Note: you can combine a [`CommitOperationCopy`] and a [`CommitOperationDelete`] to rename an LFS file on the Hub.
+
+    Args:
+        src_path_in_repo (`str`):
+            Relative filepath in the repo of the file to be copied, e.g. `"checkpoints/1fec34a/weights.bin"`.
+        path_in_repo (`str`):
+            Relative filepath in the repo where to copy the file, e.g. `"checkpoints/1fec34a/weights_copy.bin"`.
+        src_revision (`str`, *optional*):
+            The git revision of the file to be copied. Can be any valid git revision.
+            Defaults to the target commit revision.
+    """
+
+    src_path_in_repo: str
+    path_in_repo: str
+    src_revision: Optional[str] = None
+    # set to the OID of the file to be copied if it has already been uploaded
+    # useful to determine if a commit will be empty or not.
+    _src_oid: Optional[str] = None
+    # set to the OID of the file to copy to if it has already been uploaded
+    # useful to determine if a commit will be empty or not.
+    _dest_oid: Optional[str] = None
+
+    def __post_init__(self):
+        self.src_path_in_repo = _validate_path_in_repo(self.src_path_in_repo)
+        self.path_in_repo = _validate_path_in_repo(self.path_in_repo)
+
+
+@dataclass
+class CommitOperationAdd:
+    """
+    Data structure holding necessary info to upload a file to a repository on the Hub.
+
+    Args:
+        path_in_repo (`str`):
+            Relative filepath in the repo, for example: `"checkpoints/1fec34a/weights.bin"`
+        path_or_fileobj (`str`, `Path`, `bytes`, or `BinaryIO`):
+            Either:
+            - a path to a local file (as `str` or `pathlib.Path`) to upload
+            - a buffer of bytes (`bytes`) holding the content of the file to upload
+            - a "file object" (subclass of `io.BufferedIOBase`), typically obtained
+              with `open(path, "rb")`. It must support `seek()` and `tell()` methods.
+
+    Raises:
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If `path_or_fileobj` is not one of `str`, `Path`, `bytes` or `io.BufferedIOBase`.
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If `path_or_fileobj` is a `str` or `Path` but not a path to an existing file.
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If `path_or_fileobj` is an `io.BufferedIOBase` but it doesn't support both
+            `seek()` and `tell()`.
+    """
+
+    path_in_repo: str
+    path_or_fileobj: Union[str, Path, bytes, BinaryIO]
+    upload_info: UploadInfo = field(init=False, repr=False)
+
+    # Internal attributes
+
+    # set to "lfs" or "regular" once known
+    _upload_mode: Optional[UploadMode] = field(init=False, repr=False, default=None)
+
+    # set to True if .gitignore rules prevent the file from being uploaded as LFS
+    # (server-side check)
+    _should_ignore: Optional[bool] = field(init=False, repr=False, default=None)
+
+    # set to the remote OID of the file if it has already been uploaded
+    # useful to determine if a commit will be empty or not
+    _remote_oid: Optional[str] = field(init=False, repr=False, default=None)
+
+    # set to True once the file has been uploaded as LFS
+    _is_uploaded: bool = field(init=False, repr=False, default=False)
+
+    # set to True once the file has been committed
+    _is_committed: bool = field(init=False, repr=False, default=False)
+
+    def __post_init__(self) -> None:
+        """Validates `path_or_fileobj` and computes `upload_info`."""
+        self.path_in_repo = _validate_path_in_repo(self.path_in_repo)
+
+        # Validate `path_or_fileobj` value
+        if isinstance(self.path_or_fileobj, Path):
+            self.path_or_fileobj = str(self.path_or_fileobj)
+        if isinstance(self.path_or_fileobj, str):
+            path_or_fileobj = os.path.normpath(os.path.expanduser(self.path_or_fileobj))
+            if not os.path.isfile(path_or_fileobj):
+                raise ValueError(f"Provided path: '{path_or_fileobj}' is not a file on the local file system")
+        elif not isinstance(self.path_or_fileobj, (io.BufferedIOBase, bytes)):
+            # ^^ Inspired from: https://stackoverflow.com/questions/44584829/how-to-determine-if-file-is-opened-in-binary-or-text-mode
+            raise ValueError(
+                "path_or_fileobj must be either an instance of str, bytes or"
+                " io.BufferedIOBase. If you passed a file-like object, make sure it is"
+                " in binary mode."
+ ) + if isinstance(self.path_or_fileobj, io.BufferedIOBase): + try: + self.path_or_fileobj.tell() + self.path_or_fileobj.seek(0, os.SEEK_CUR) + except (OSError, AttributeError) as exc: + raise ValueError( + "path_or_fileobj is a file-like object but does not implement seek() and tell()" + ) from exc + + # Compute "upload_info" attribute + if isinstance(self.path_or_fileobj, str): + self.upload_info = UploadInfo.from_path(self.path_or_fileobj) + elif isinstance(self.path_or_fileobj, bytes): + self.upload_info = UploadInfo.from_bytes(self.path_or_fileobj) + else: + self.upload_info = UploadInfo.from_fileobj(self.path_or_fileobj) + + @contextmanager + def as_file(self, with_tqdm: bool = False) -> Iterator[BinaryIO]: + """ + A context manager that yields a file-like object allowing to read the underlying + data behind `path_or_fileobj`. + + Args: + with_tqdm (`bool`, *optional*, defaults to `False`): + If True, iterating over the file object will display a progress bar. Only + works if the file-like object is a path to a file. Pure bytes and buffers + are not supported. + + Example: + + ```python + >>> operation = CommitOperationAdd( + ... path_in_repo="remote/dir/weights.h5", + ... path_or_fileobj="./local/weights.h5", + ... ) + CommitOperationAdd(path_in_repo='remote/dir/weights.h5', path_or_fileobj='./local/weights.h5') + + >>> with operation.as_file() as file: + ... content = file.read() + + >>> with operation.as_file(with_tqdm=True) as file: + ... while True: + ... data = file.read(1024) + ... if not data: + ... break + config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s] + + >>> with operation.as_file(with_tqdm=True) as file: + ... requests.put(..., data=file) + config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s] + ``` + """ + if isinstance(self.path_or_fileobj, str) or isinstance(self.path_or_fileobj, Path): + if with_tqdm: + with tqdm_stream_file(self.path_or_fileobj) as file: + yield file + else: + with open(self.path_or_fileobj, "rb") as file: + yield file + elif isinstance(self.path_or_fileobj, bytes): + yield io.BytesIO(self.path_or_fileobj) + elif isinstance(self.path_or_fileobj, io.BufferedIOBase): + prev_pos = self.path_or_fileobj.tell() + yield self.path_or_fileobj + self.path_or_fileobj.seek(prev_pos, io.SEEK_SET) + + def b64content(self) -> bytes: + """ + The base64-encoded content of `path_or_fileobj` + + Returns: `bytes` + """ + with self.as_file() as file: + return base64.b64encode(file.read()) + + @property + def _local_oid(self) -> Optional[str]: + """Return the OID of the local file. + + This OID is then compared to `self._remote_oid` to check if the file has changed compared to the remote one. + If the file did not change, we won't upload it again to prevent empty commits. + + For LFS files, the OID corresponds to the SHA256 of the file content (used a LFS ref). + For regular files, the OID corresponds to the SHA1 of the file content. + Note: this is slightly different to git OID computation since the oid of an LFS file is usually the git-SHA1 of the + pointer file content (not the actual file content). However, using the SHA256 is enough to detect changes + and more convenient client-side. + """ + if self._upload_mode is None: + return None + elif self._upload_mode == "lfs": + return self.upload_info.sha256.hex() + else: + # Regular file => compute sha1 + # => no need to read by chunk since the file is guaranteed to be <=5MB. 
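An aside on the comment above: for regular files the OID is git's blob SHA-1, i.e. SHA-1 over a `blob <size>\0` header plus the content. A minimal sketch, assuming `sha.git_hash` implements the standard git blob hash:

```python
import hashlib

def git_blob_oid(data: bytes) -> str:
    # git hashes "blob <size>\0" + content with SHA-1
    header = f"blob {len(data)}\0".encode()
    return hashlib.sha1(header + data).hexdigest()

# Matches `git hash-object` for the same content:
assert git_blob_oid(b"hello\n") == "ce013625030ba8dba906f756967f9e9ca394464a"
```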
+ with self.as_file() as file: + return sha.git_hash(file.read()) + + +def _validate_path_in_repo(path_in_repo: str) -> str: + # Validate `path_in_repo` value to prevent a server-side issue + if path_in_repo.startswith("/"): + path_in_repo = path_in_repo[1:] + if path_in_repo == "." or path_in_repo == ".." or path_in_repo.startswith("../"): + raise ValueError(f"Invalid `path_in_repo` in CommitOperation: '{path_in_repo}'") + if path_in_repo.startswith("./"): + path_in_repo = path_in_repo[2:] + for forbidden in FORBIDDEN_FOLDERS: + if any(part == forbidden for part in path_in_repo.split("/")): + raise ValueError( + f"Invalid `path_in_repo` in CommitOperation: cannot update files under a '{forbidden}/' folder (path:" + f" '{path_in_repo}')." + ) + return path_in_repo + + +CommitOperation = Union[CommitOperationAdd, CommitOperationCopy, CommitOperationDelete] + + +def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None: + """ + Warn user when a list of operations is expected to overwrite itself in a single + commit. + + Rules: + - If a filepath is updated by multiple `CommitOperationAdd` operations, a warning + message is triggered. + - If a filepath is updated at least once by a `CommitOperationAdd` and then deleted + by a `CommitOperationDelete`, a warning is triggered. + - If a `CommitOperationDelete` deletes a filepath that is then updated by a + `CommitOperationAdd`, no warning is triggered. This is usually useless (no need to + delete before upload) but can happen if a user deletes an entire folder and then + add new files to it. + """ + nb_additions_per_path: Dict[str, int] = defaultdict(int) + for operation in operations: + path_in_repo = operation.path_in_repo + if isinstance(operation, CommitOperationAdd): + if nb_additions_per_path[path_in_repo] > 0: + warnings.warn( + "About to update multiple times the same file in the same commit:" + f" '{path_in_repo}'. This can cause undesired inconsistencies in" + " your repo." + ) + nb_additions_per_path[path_in_repo] += 1 + for parent in PurePosixPath(path_in_repo).parents: + # Also keep track of number of updated files per folder + # => warns if deleting a folder overwrite some contained files + nb_additions_per_path[str(parent)] += 1 + if isinstance(operation, CommitOperationDelete): + if nb_additions_per_path[str(PurePosixPath(path_in_repo))] > 0: + if operation.is_folder: + warnings.warn( + "About to delete a folder containing files that have just been" + f" updated within the same commit: '{path_in_repo}'. This can" + " cause undesired inconsistencies in your repo." + ) + else: + warnings.warn( + "About to delete a file that have just been updated within the" + f" same commit: '{path_in_repo}'. This can cause undesired" + " inconsistencies in your repo." + ) + + +@validate_hf_hub_args +def _upload_files( + *, + additions: List[CommitOperationAdd], + repo_type: str, + repo_id: str, + headers: Dict[str, str], + endpoint: Optional[str] = None, + num_threads: int = 5, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, +): + """ + Negotiates per-file transfer (LFS vs Xet) and uploads in batches. 
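The function body below walks `additions` in batches of `UPLOAD_BATCH_MAX_NUM_FILES`. The real `chunk_iterable` lives in `.utils`; here is a behavior-equivalent sketch (yielding lists for clarity, an assumption about the helper's exact return type):

```python
from itertools import islice
from typing import Iterable, Iterator, List, TypeVar

T = TypeVar("T")

def chunk_iterable(iterable: Iterable[T], chunk_size: int) -> Iterator[List[T]]:
    # Yield successive chunks until the source iterator is exhausted.
    it = iter(iterable)
    while chunk := list(islice(it, chunk_size)):
        yield chunk

assert list(chunk_iterable(range(10), chunk_size=4)) == [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]
```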
+    """
+    xet_additions: List[CommitOperationAdd] = []
+    lfs_actions: List[Dict] = []
+    lfs_oid2addop: Dict[str, CommitOperationAdd] = {}
+
+    for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
+        chunk_list = list(chunk)
+
+        transfers: List[str] = ["basic", "multipart"]
+        has_buffered_io_data = any(isinstance(op.path_or_fileobj, io.BufferedIOBase) for op in chunk_list)
+        if is_xet_available():
+            if not has_buffered_io_data:
+                transfers.append("xet")
+            else:
+                logger.warning(
+                    "Uploading files as a binary IO buffer is not supported by Xet Storage. "
+                    "Falling back to HTTP upload."
+                )
+
+        actions_chunk, errors_chunk, chosen_transfer = post_lfs_batch_info(
+            upload_infos=[op.upload_info for op in chunk_list],
+            repo_id=repo_id,
+            repo_type=repo_type,
+            revision=revision,
+            endpoint=endpoint,
+            headers=headers,
+            token=None,  # already passed in 'headers'
+            transfers=transfers,
+        )
+        if errors_chunk:
+            message = "\n".join(
+                [
+                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}`"
+                    for err in errors_chunk
+                ]
+            )
+            raise ValueError(f"LFS batch API returned errors:\n{message}")
+
+        # If the server chose the "xet" transfer (only offered when the chunk contains no buffered
+        # IO data), route the whole chunk through Xet; otherwise upload it via LFS.
+        if chosen_transfer == "xet" and ("xet" in transfers):
+            xet_additions.extend(chunk_list)
+        else:
+            lfs_actions.extend(actions_chunk)
+            for op in chunk_list:
+                lfs_oid2addop[op.upload_info.sha256.hex()] = op
+
+    if len(lfs_actions) > 0:
+        _upload_lfs_files(
+            actions=lfs_actions,
+            oid2addop=lfs_oid2addop,
+            headers=headers,
+            endpoint=endpoint,
+            num_threads=num_threads,
+        )
+
+    if len(xet_additions) > 0:
+        _upload_xet_files(
+            additions=xet_additions,
+            repo_type=repo_type,
+            repo_id=repo_id,
+            headers=headers,
+            endpoint=endpoint,
+            revision=revision,
+            create_pr=create_pr,
+        )
+
+
+@validate_hf_hub_args
+def _upload_lfs_files(
+    *,
+    actions: List[Dict],
+    oid2addop: Dict[str, CommitOperationAdd],
+    headers: Dict[str, str],
+    endpoint: Optional[str] = None,
+    num_threads: int = 5,
+):
+    """
+    Uploads files to the Hub using the large file storage protocol, following the
+    batch `actions` returned by the server.
+
+    Relevant external documentation:
+        - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
+
+    Args:
+        actions (`List[Dict]`):
+            LFS batch actions returned by the server.
+        oid2addop (`Dict[str, CommitOperationAdd]`):
+            A dictionary mapping the OID of each file to its corresponding `CommitOperationAdd` object.
+        headers (`Dict[str, str]`):
+            Headers to use for the request, including authorization headers and user agent.
+        endpoint (`str`, *optional*):
+            The endpoint to use for the request. Defaults to `constants.ENDPOINT`.
+        num_threads (`int`, *optional*):
+            The number of concurrent threads to use when uploading. Defaults to 5.
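For orientation, a hedged sketch of the request body the LFS Batch API (linked above) expects; field names follow the spec, while the OID and size are illustrative:

```python
import json

batch_request = {
    "operation": "upload",
    "transfers": ["basic", "multipart"],  # plus "xet" when offered by the client
    "hash_algo": "sha256",
    "objects": [
        {"oid": "<sha256-hex-of-the-file>", "size": 1048576},
    ],
}
print(json.dumps(batch_request, indent=2))
```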
+ + Raises: + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + If an upload failed for any reason + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If the server returns malformed responses + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + If the LFS batch endpoint returned an HTTP error. + """ + # Filter out files already present upstream + filtered_actions = [] + for action in actions: + if action.get("actions") is None: + logger.debug( + f"Content of file {oid2addop[action['oid']].path_in_repo} is already present upstream - skipping upload." + ) + else: + filtered_actions.append(action) + + # Upload according to server-provided actions + def _wrapped_lfs_upload(batch_action) -> None: + try: + operation = oid2addop[batch_action["oid"]] + lfs_upload(operation=operation, lfs_batch_action=batch_action, headers=headers, endpoint=endpoint) + except Exception as exc: + raise RuntimeError(f"Error while uploading '{operation.path_in_repo}' to the Hub.") from exc + + if constants.HF_HUB_ENABLE_HF_TRANSFER: + logger.debug(f"Uploading {len(filtered_actions)} LFS files to the Hub using `hf_transfer`.") + for action in hf_tqdm(filtered_actions, name="huggingface_hub.lfs_upload"): + _wrapped_lfs_upload(action) + elif len(filtered_actions) == 1: + logger.debug("Uploading 1 LFS file to the Hub") + _wrapped_lfs_upload(filtered_actions[0]) + else: + logger.debug( + f"Uploading {len(filtered_actions)} LFS files to the Hub using up to {num_threads} threads concurrently" + ) + thread_map( + _wrapped_lfs_upload, + filtered_actions, + desc=f"Upload {len(filtered_actions)} LFS files", + max_workers=num_threads, + tqdm_class=hf_tqdm, + ) + + +@validate_hf_hub_args +def _upload_xet_files( + *, + additions: List[CommitOperationAdd], + repo_type: str, + repo_id: str, + headers: Dict[str, str], + endpoint: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, +): + """ + Uploads the content of `additions` to the Hub using the xet storage protocol. + This chunks the files and deduplicates the chunks before uploading them to xetcas storage. + + Args: + additions (`List` of `CommitOperationAdd`): + The files to be uploaded. + repo_type (`str`): + Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`. + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + headers (`Dict[str, str]`): + Headers to use for the request, including authorization headers and user agent. + endpoint: (`str`, *optional*): + The endpoint to use for the xetcas service. Defaults to `constants.ENDPOINT`. + revision (`str`, *optional*): + The git revision to upload to. + create_pr (`bool`, *optional*): + Whether or not to create a Pull Request with that commit. + + Raises: + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + If an upload failed for any reason. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If the server returns malformed responses or if the user is unauthorized to upload to xet storage. + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + If the LFS batch endpoint returned an HTTP error. + + **How it works:** + The file download system uses Xet storage, which is a content-addressable storage system that breaks files into chunks + for efficient storage and transfer. 
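A toy illustration of the content-addressed dedup idea (fixed-size chunks for brevity; real Xet chunking is content-defined, so chunk boundaries survive insertions better than here):

```python
import hashlib

def chunk_ids(data: bytes, chunk_size: int = 4) -> list:
    # Hash each chunk: identical content yields an identical ID.
    return [
        hashlib.sha256(data[i : i + chunk_size]).hexdigest()[:8]
        for i in range(0, len(data), chunk_size)
    ]

a = chunk_ids(b"aaaabbbbcccc")
b = chunk_ids(b"aaaabbbbzzzz")
assert a[:2] == b[:2] and a[2] != b[2]  # only the changed chunk needs uploading
```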
+ + `hf_xet.upload_files` manages uploading files by: + - Taking a list of file paths to upload + - Breaking files into smaller chunks for efficient storage + - Avoiding duplicate storage by recognizing identical chunks across files + - Connecting to a storage server (CAS server) that manages these chunks + + The upload process works like this: + 1. Create a local folder at ~/.cache/huggingface/xet/chunk-cache to store file chunks for reuse. + 2. Process files in parallel (up to 8 files at once): + 2.1. Read the file content. + 2.2. Split the file content into smaller chunks based on content patterns: each chunk gets a unique ID based on what's in it. + 2.3. For each chunk: + - Check if it already exists in storage. + - Skip uploading chunks that already exist. + 2.4. Group chunks into larger blocks for efficient transfer. + 2.5. Upload these blocks to the storage server. + 2.6. Create and upload information about how the file is structured. + 3. Return reference files that contain information about the uploaded files, which can be used later to download them. + """ + if len(additions) == 0: + return + + # at this point, we know that hf_xet is installed + from hf_xet import upload_bytes, upload_files + + from .utils._xet_progress_reporting import XetProgressReporter + + try: + xet_connection_info = fetch_xet_connection_info_from_repo_info( + token_type=XetTokenType.WRITE, + repo_id=repo_id, + repo_type=repo_type, + revision=revision, + headers=headers, + endpoint=endpoint, + params={"create_pr": "1"} if create_pr else None, + ) + except HfHubHTTPError as e: + if e.response.status_code == 401: + raise XetAuthorizationError( + f"You are unauthorized to upload to xet storage for {repo_type}/{repo_id}. " + f"Please check that you have configured your access token with write access to the repo." 
+ ) from e + raise + + xet_endpoint = xet_connection_info.endpoint + access_token_info = (xet_connection_info.access_token, xet_connection_info.expiration_unix_epoch) + + def token_refresher() -> Tuple[str, int]: + new_xet_connection = fetch_xet_connection_info_from_repo_info( + token_type=XetTokenType.WRITE, + repo_id=repo_id, + repo_type=repo_type, + revision=revision, + headers=headers, + endpoint=endpoint, + params={"create_pr": "1"} if create_pr else None, + ) + if new_xet_connection is None: + raise XetRefreshTokenError("Failed to refresh xet token") + return new_xet_connection.access_token, new_xet_connection.expiration_unix_epoch + + if not are_progress_bars_disabled(): + progress = XetProgressReporter() + progress_callback = progress.update_progress + else: + progress, progress_callback = None, None + + try: + all_bytes_ops = [op for op in additions if isinstance(op.path_or_fileobj, bytes)] + all_paths_ops = [op for op in additions if isinstance(op.path_or_fileobj, (str, Path))] + + if len(all_paths_ops) > 0: + all_paths = [str(op.path_or_fileobj) for op in all_paths_ops] + upload_files( + all_paths, + xet_endpoint, + access_token_info, + token_refresher, + progress_callback, + repo_type, + ) + + if len(all_bytes_ops) > 0: + all_bytes = [op.path_or_fileobj for op in all_bytes_ops] + upload_bytes( + all_bytes, + xet_endpoint, + access_token_info, + token_refresher, + progress_callback, + repo_type, + ) + + finally: + if progress is not None: + progress.close(False) + + return + + +def _validate_preupload_info(preupload_info: dict): + files = preupload_info.get("files") + if not isinstance(files, list): + raise ValueError("preupload_info is improperly formatted") + for file_info in files: + if not ( + isinstance(file_info, dict) + and isinstance(file_info.get("path"), str) + and isinstance(file_info.get("uploadMode"), str) + and (file_info["uploadMode"] in ("lfs", "regular")) + ): + raise ValueError("preupload_info is improperly formatted:") + return preupload_info + + +@validate_hf_hub_args +def _fetch_upload_modes( + additions: Iterable[CommitOperationAdd], + repo_type: str, + repo_id: str, + headers: Dict[str, str], + revision: str, + endpoint: Optional[str] = None, + create_pr: bool = False, + gitignore_content: Optional[str] = None, +) -> None: + """ + Requests the Hub "preupload" endpoint to determine whether each input file should be uploaded as a regular git blob, + as a git LFS blob, or as a XET file. Input `additions` are mutated in-place with the upload mode. + + Args: + additions (`Iterable` of :class:`CommitOperationAdd`): + Iterable of :class:`CommitOperationAdd` describing the files to + upload to the Hub. + repo_type (`str`): + Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`. + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + headers (`Dict[str, str]`): + Headers to use for the request, including authorization headers and user agent. + revision (`str`): + The git revision to upload the files to. Can be any valid git revision. + gitignore_content (`str`, *optional*): + The content of the `.gitignore` file to know which files should be ignored. The order of priority + is to first check if `gitignore_content` is passed, then check if the `.gitignore` file is present + in the list of files to commit and finally default to the `.gitignore` file already hosted on the Hub + (if any). + Raises: + [`~utils.HfHubHTTPError`] + If the Hub API returned an error. 
+ [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If the Hub API response is improperly formatted. + """ + endpoint = endpoint if endpoint is not None else constants.ENDPOINT + + # Fetch upload mode (LFS or regular) chunk by chunk. + upload_modes: Dict[str, UploadMode] = {} + should_ignore_info: Dict[str, bool] = {} + oid_info: Dict[str, Optional[str]] = {} + + for chunk in chunk_iterable(additions, 256): + payload: Dict = { + "files": [ + { + "path": op.path_in_repo, + "sample": base64.b64encode(op.upload_info.sample).decode("ascii"), + "size": op.upload_info.size, + } + for op in chunk + ] + } + if gitignore_content is not None: + payload["gitIgnore"] = gitignore_content + + resp = get_session().post( + f"{endpoint}/api/{repo_type}s/{repo_id}/preupload/{revision}", + json=payload, + headers=headers, + params={"create_pr": "1"} if create_pr else None, + ) + hf_raise_for_status(resp) + preupload_info = _validate_preupload_info(resp.json()) + upload_modes.update(**{file["path"]: file["uploadMode"] for file in preupload_info["files"]}) + should_ignore_info.update(**{file["path"]: file["shouldIgnore"] for file in preupload_info["files"]}) + oid_info.update(**{file["path"]: file.get("oid") for file in preupload_info["files"]}) + + # Set upload mode for each addition operation + for addition in additions: + addition._upload_mode = upload_modes[addition.path_in_repo] + addition._should_ignore = should_ignore_info[addition.path_in_repo] + addition._remote_oid = oid_info[addition.path_in_repo] + + # Empty files cannot be uploaded as LFS (S3 would fail with a 501 Not Implemented) + # => empty files are uploaded as "regular" to still allow users to commit them. + for addition in additions: + if addition.upload_info.size == 0: + addition._upload_mode = "regular" + + +@validate_hf_hub_args +def _fetch_files_to_copy( + copies: Iterable[CommitOperationCopy], + repo_type: str, + repo_id: str, + headers: Dict[str, str], + revision: str, + endpoint: Optional[str] = None, +) -> Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]]: + """ + Fetch information about the files to copy. + + For LFS files, we only need their metadata (file size and sha256) while for regular files + we need to download the raw content from the Hub. + + Args: + copies (`Iterable` of :class:`CommitOperationCopy`): + Iterable of :class:`CommitOperationCopy` describing the files to + copy on the Hub. + repo_type (`str`): + Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`. + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + headers (`Dict[str, str]`): + Headers to use for the request, including authorization headers and user agent. + revision (`str`): + The git revision to upload the files to. Can be any valid git revision. + + Returns: `Dict[Tuple[str, Optional[str]], Union[RepoFile, bytes]]]` + Key is the file path and revision of the file to copy. + Value is the raw content as bytes (for regular files) or the file information as a RepoFile (for LFS files). + + Raises: + [`~utils.HfHubHTTPError`] + If the Hub API returned an error. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If the Hub API response is improperly formatted. 
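One detail worth noting about the body below: `itertools.groupby` only merges *consecutive* items with equal keys, so copies that are not pre-sorted by `src_revision` simply produce more (smaller) batches rather than incorrect results:

```python
from itertools import groupby

revisions = ["main", "main", "v1.0", "main"]
groups = [(key, len(list(items))) for key, items in groupby(revisions)]
assert groups == [("main", 2), ("v1.0", 1), ("main", 1)]
```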
+ """ + from .hf_api import HfApi, RepoFolder + + hf_api = HfApi(endpoint=endpoint, headers=headers) + files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]] = {} + # Store (path, revision) -> oid mapping + oid_info: Dict[Tuple[str, Optional[str]], Optional[str]] = {} + # 1. Fetch OIDs for destination paths in batches. + dest_paths = [op.path_in_repo for op in copies] + for offset in range(0, len(dest_paths), FETCH_LFS_BATCH_SIZE): + dest_repo_files = hf_api.get_paths_info( + repo_id=repo_id, + paths=dest_paths[offset : offset + FETCH_LFS_BATCH_SIZE], + revision=revision, + repo_type=repo_type, + ) + for file in dest_repo_files: + if not isinstance(file, RepoFolder): + oid_info[(file.path, revision)] = file.blob_id + + # 2. Group by source revision and fetch source file info in batches. + for src_revision, operations in groupby(copies, key=lambda op: op.src_revision): + operations = list(operations) # type: ignore + src_paths = [op.src_path_in_repo for op in operations] + for offset in range(0, len(src_paths), FETCH_LFS_BATCH_SIZE): + src_repo_files = hf_api.get_paths_info( + repo_id=repo_id, + paths=src_paths[offset : offset + FETCH_LFS_BATCH_SIZE], + revision=src_revision or revision, + repo_type=repo_type, + ) + + for src_repo_file in src_repo_files: + if isinstance(src_repo_file, RepoFolder): + raise NotImplementedError("Copying a folder is not implemented.") + oid_info[(src_repo_file.path, src_revision)] = src_repo_file.blob_id + # If it's an LFS file, store the RepoFile object. Otherwise, download raw bytes. + if src_repo_file.lfs: + files_to_copy[(src_repo_file.path, src_revision)] = src_repo_file + else: + # TODO: (optimization) download regular files to copy concurrently + url = hf_hub_url( + endpoint=endpoint, + repo_type=repo_type, + repo_id=repo_id, + revision=src_revision or revision, + filename=src_repo_file.path, + ) + response = get_session().get(url, headers=headers) + hf_raise_for_status(response) + files_to_copy[(src_repo_file.path, src_revision)] = response.content + # 3. Ensure all operations found a corresponding file in the Hub + # and track src/dest OIDs for each operation. + for operation in operations: + if (operation.src_path_in_repo, src_revision) not in files_to_copy: + raise EntryNotFoundError( + f"Cannot copy {operation.src_path_in_repo} at revision " + f"{src_revision or revision}: file is missing on repo." + ) + operation._src_oid = oid_info.get((operation.src_path_in_repo, operation.src_revision)) + operation._dest_oid = oid_info.get((operation.path_in_repo, revision)) + return files_to_copy + + +def _prepare_commit_payload( + operations: Iterable[CommitOperation], + files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]], + commit_message: str, + commit_description: Optional[str] = None, + parent_commit: Optional[str] = None, +) -> Iterable[Dict[str, Any]]: + """ + Builds the payload to POST to the `/commit` API of the Hub. + + Payload is returned as an iterator so that it can be streamed as a ndjson in the + POST request. + + For more information, see: + - https://github.com/huggingface/huggingface_hub/issues/1085#issuecomment-1265208073 + - http://ndjson.org/ + """ + commit_description = commit_description if commit_description is not None else "" + + # 1. 
Send a header item with the commit metadata + header_value = {"summary": commit_message, "description": commit_description} + if parent_commit is not None: + header_value["parentCommit"] = parent_commit + yield {"key": "header", "value": header_value} + + nb_ignored_files = 0 + + # 2. Send operations, one per line + for operation in operations: + # Skip ignored files + if isinstance(operation, CommitOperationAdd) and operation._should_ignore: + logger.debug(f"Skipping file '{operation.path_in_repo}' in commit (ignored by gitignore file).") + nb_ignored_files += 1 + continue + + # 2.a. Case adding a regular file + if isinstance(operation, CommitOperationAdd) and operation._upload_mode == "regular": + yield { + "key": "file", + "value": { + "content": operation.b64content().decode(), + "path": operation.path_in_repo, + "encoding": "base64", + }, + } + # 2.b. Case adding an LFS file + elif isinstance(operation, CommitOperationAdd) and operation._upload_mode == "lfs": + yield { + "key": "lfsFile", + "value": { + "path": operation.path_in_repo, + "algo": "sha256", + "oid": operation.upload_info.sha256.hex(), + "size": operation.upload_info.size, + }, + } + # 2.c. Case deleting a file or folder + elif isinstance(operation, CommitOperationDelete): + yield { + "key": "deletedFolder" if operation.is_folder else "deletedFile", + "value": {"path": operation.path_in_repo}, + } + # 2.d. Case copying a file or folder + elif isinstance(operation, CommitOperationCopy): + file_to_copy = files_to_copy[(operation.src_path_in_repo, operation.src_revision)] + if isinstance(file_to_copy, bytes): + yield { + "key": "file", + "value": { + "content": base64.b64encode(file_to_copy).decode(), + "path": operation.path_in_repo, + "encoding": "base64", + }, + } + elif file_to_copy.lfs: + yield { + "key": "lfsFile", + "value": { + "path": operation.path_in_repo, + "algo": "sha256", + "oid": file_to_copy.lfs.sha256, + }, + } + else: + raise ValueError( + "Malformed files_to_copy (should be raw file content as bytes or RepoFile objects with LFS info." + ) + # 2.e. Never expected to happen + else: + raise ValueError( + f"Unknown operation to commit. Operation: {operation}. Upload mode:" + f" {getattr(operation, '_upload_mode', None)}" + ) + + if nb_ignored_files > 0: + logger.info(f"Skipped {nb_ignored_files} file(s) in commit (ignored by gitignore file).") diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_commit_scheduler.py b/venv/lib/python3.13/site-packages/huggingface_hub/_commit_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..1bc8db6a8ade4d2253dd241a66c86def5dac2733 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_commit_scheduler.py @@ -0,0 +1,350 @@ +import atexit +import logging +import os +import time +from concurrent.futures import Future +from dataclasses import dataclass +from io import SEEK_END, SEEK_SET, BytesIO +from pathlib import Path +from threading import Lock, Thread +from typing import Dict, List, Optional, Union + +from .hf_api import DEFAULT_IGNORE_PATTERNS, CommitInfo, CommitOperationAdd, HfApi +from .utils import filter_repo_objects + + +logger = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class _FileToUpload: + """Temporary dataclass to store info about files to upload. Not meant to be used directly.""" + + local_path: Path + path_in_repo: str + size_limit: int + last_modified: float + + +class CommitScheduler: + """ + Scheduler to upload a local folder to the Hub at regular intervals (e.g. 
push to hub every 5 minutes). + + The recommended way to use the scheduler is to use it as a context manager. This ensures that the scheduler is + properly stopped and the last commit is triggered when the script ends. The scheduler can also be stopped manually + with the `stop` method. Checkout the [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#scheduled-uploads) + to learn more about how to use it. + + Args: + repo_id (`str`): + The id of the repo to commit to. + folder_path (`str` or `Path`): + Path to the local folder to upload regularly. + every (`int` or `float`, *optional*): + The number of minutes between each commit. Defaults to 5 minutes. + path_in_repo (`str`, *optional*): + Relative path of the directory in the repo, for example: `"checkpoints/"`. Defaults to the root folder + of the repository. + repo_type (`str`, *optional*): + The type of the repo to commit to. Defaults to `model`. + revision (`str`, *optional*): + The revision of the repo to commit to. Defaults to `main`. + private (`bool`, *optional*): + Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists. + token (`str`, *optional*): + The token to use to commit to the repo. Defaults to the token saved on the machine. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are uploaded. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not uploaded. + squash_history (`bool`, *optional*): + Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is + useful to avoid degraded performances on the repo when it grows too large. + hf_api (`HfApi`, *optional*): + The [`HfApi`] client to use to commit to the Hub. Can be set with custom settings (user agent, token,...). + + Example: + ```py + >>> from pathlib import Path + >>> from huggingface_hub import CommitScheduler + + # Scheduler uploads every 10 minutes + >>> csv_path = Path("watched_folder/data.csv") + >>> CommitScheduler(repo_id="test_scheduler", repo_type="dataset", folder_path=csv_path.parent, every=10) + + >>> with csv_path.open("a") as f: + ... f.write("first line") + + # Some time later (...) + >>> with csv_path.open("a") as f: + ... f.write("second line") + ``` + + Example using a context manager: + ```py + >>> from pathlib import Path + >>> from huggingface_hub import CommitScheduler + + >>> with CommitScheduler(repo_id="test_scheduler", repo_type="dataset", folder_path="watched_folder", every=10) as scheduler: + ... csv_path = Path("watched_folder/data.csv") + ... with csv_path.open("a") as f: + ... f.write("first line") + ... (...) + ... with csv_path.open("a") as f: + ... 
f.write("second line")
+
+    # Scheduler is now stopped and the last commit has been triggered
+    ```
+    """
+
+    def __init__(
+        self,
+        *,
+        repo_id: str,
+        folder_path: Union[str, Path],
+        every: Union[int, float] = 5,
+        path_in_repo: Optional[str] = None,
+        repo_type: Optional[str] = None,
+        revision: Optional[str] = None,
+        private: Optional[bool] = None,
+        token: Optional[str] = None,
+        allow_patterns: Optional[Union[List[str], str]] = None,
+        ignore_patterns: Optional[Union[List[str], str]] = None,
+        squash_history: bool = False,
+        hf_api: Optional["HfApi"] = None,
+    ) -> None:
+        self.api = hf_api or HfApi(token=token)
+
+        # Folder
+        self.folder_path = Path(folder_path).expanduser().resolve()
+        self.path_in_repo = path_in_repo or ""
+        self.allow_patterns = allow_patterns
+
+        if ignore_patterns is None:
+            ignore_patterns = []
+        elif isinstance(ignore_patterns, str):
+            ignore_patterns = [ignore_patterns]
+        self.ignore_patterns = ignore_patterns + DEFAULT_IGNORE_PATTERNS
+
+        if self.folder_path.is_file():
+            raise ValueError(f"'folder_path' must be a directory, not a file: '{self.folder_path}'.")
+        self.folder_path.mkdir(parents=True, exist_ok=True)
+
+        # Repository
+        repo_url = self.api.create_repo(repo_id=repo_id, private=private, repo_type=repo_type, exist_ok=True)
+        self.repo_id = repo_url.repo_id
+        self.repo_type = repo_type
+        self.revision = revision
+        self.token = token
+
+        # Keep track of already uploaded files
+        self.last_uploaded: Dict[Path, float] = {}  # key is local path, value is timestamp
+
+        # Scheduler
+        if not every > 0:
+            raise ValueError(f"'every' must be a positive number of minutes, not '{every}'.")
+        self.lock = Lock()
+        self.every = every
+        self.squash_history = squash_history
+
+        # Mark as not stopped *before* starting the background thread, so that the first
+        # scheduled push cannot read the flag before it exists.
+        self.__stopped = False
+
+        logger.info(f"Scheduled job to push '{self.folder_path}' to '{self.repo_id}' every {self.every} minutes.")
+        self._scheduler_thread = Thread(target=self._run_scheduler, daemon=True)
+        self._scheduler_thread.start()
+        atexit.register(self._push_to_hub)
+
+    def stop(self) -> None:
+        """Stop the scheduler.
+
+        A stopped scheduler cannot be restarted. Mostly for testing purposes.
+        """
+        self.__stopped = True
+
+    def __enter__(self) -> "CommitScheduler":
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
+        # Upload last changes before exiting
+        self.trigger().result()
+        self.stop()
+        return
+
+    def _run_scheduler(self) -> None:
+        """Background thread that sleeps between each scheduled push to the Hub."""
+        while True:
+            self.last_future = self.trigger()
+            time.sleep(self.every * 60)
+            if self.__stopped:
+                break
+
+    def trigger(self) -> Future:
+        """Trigger a `push_to_hub` and return a future.
+
+        This method is automatically called every `every` minutes. You can also call it manually to trigger a commit
+        immediately, without waiting for the next scheduled commit.
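A hedged usage sketch (repo id illustrative, valid token assumed): the returned `Future` resolves to the `CommitInfo` of the push, or `None` when no file changed:

```python
from huggingface_hub import CommitScheduler

scheduler = CommitScheduler(repo_id="my-user/my-logs", repo_type="dataset", folder_path="watched_folder")
future = scheduler.trigger()   # concurrent.futures.Future
commit_info = future.result()  # blocks until the commit completes (or returns None)
scheduler.stop()
```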
+ """ + return self.api.run_as_future(self._push_to_hub) + + def _push_to_hub(self) -> Optional[CommitInfo]: + if self.__stopped: # If stopped, already scheduled commits are ignored + return None + + logger.info("(Background) scheduled commit triggered.") + try: + value = self.push_to_hub() + if self.squash_history: + logger.info("(Background) squashing repo history.") + self.api.super_squash_history(repo_id=self.repo_id, repo_type=self.repo_type, branch=self.revision) + return value + except Exception as e: + logger.error(f"Error while pushing to Hub: {e}") # Depending on the setup, error might be silenced + raise + + def push_to_hub(self) -> Optional[CommitInfo]: + """ + Push folder to the Hub and return the commit info. + + > [!WARNING] + > This method is not meant to be called directly. It is run in the background by the scheduler, respecting a + > queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency + > issues. + + The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and + uploads only changed files. If no changes are found, the method returns without committing anything. If you want + to change this behavior, you can inherit from [`CommitScheduler`] and override this method. This can be useful + for example to compress data together in a single file before committing. For more details and examples, check + out our [integration guide](https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#scheduled-uploads). + """ + # Check files to upload (with lock) + with self.lock: + logger.debug("Listing files to upload for scheduled commit.") + + # List files from folder (taken from `_prepare_upload_folder_additions`) + relpath_to_abspath = { + path.relative_to(self.folder_path).as_posix(): path + for path in sorted(self.folder_path.glob("**/*")) # sorted to be deterministic + if path.is_file() + } + prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else "" + + # Filter with pattern + filter out unchanged files + retrieve current file size + files_to_upload: List[_FileToUpload] = [] + for relpath in filter_repo_objects( + relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns + ): + local_path = relpath_to_abspath[relpath] + stat = local_path.stat() + if self.last_uploaded.get(local_path) is None or self.last_uploaded[local_path] != stat.st_mtime: + files_to_upload.append( + _FileToUpload( + local_path=local_path, + path_in_repo=prefix + relpath, + size_limit=stat.st_size, + last_modified=stat.st_mtime, + ) + ) + + # Return if nothing to upload + if len(files_to_upload) == 0: + logger.debug("Dropping schedule commit: no changed file to upload.") + return None + + # Convert `_FileToUpload` as `CommitOperationAdd` (=> compute file shas + limit to file size) + logger.debug("Removing unchanged files since previous scheduled commit.") + add_operations = [ + CommitOperationAdd( + # Cap the file to its current size, even if the user append data to it while a scheduled commit is happening + path_or_fileobj=PartialFileIO(file_to_upload.local_path, size_limit=file_to_upload.size_limit), + path_in_repo=file_to_upload.path_in_repo, + ) + for file_to_upload in files_to_upload + ] + + # Upload files (append mode expected - no need for lock) + logger.debug("Uploading files for scheduled commit.") + commit_info = self.api.create_commit( + repo_id=self.repo_id, + repo_type=self.repo_type, + operations=add_operations, + 
commit_message="Scheduled Commit",
+            revision=self.revision,
+        )
+
+        # Successful commit: keep track of the latest "last_modified" for each file
+        for file in files_to_upload:
+            self.last_uploaded[file.local_path] = file.last_modified
+        return commit_info
+
+
+class PartialFileIO(BytesIO):
+    """A file-like object that reads only the first part of a file.
+
+    Useful to upload a file to the Hub when the user might still be appending data to it. Only the first part of the
+    file is uploaded (i.e. the part that was available when the filesystem was first scanned).
+
+    In practice, only used internally by the CommitScheduler to regularly push a folder to the Hub with minimal
+    disturbance for the user. The object is passed to `CommitOperationAdd`.
+
+    Only supports `read`, `tell` and `seek` methods.
+
+    Args:
+        file_path (`str` or `Path`):
+            Path to the file to read.
+        size_limit (`int`):
+            The maximum number of bytes to read from the file. If the file is larger than this, only the first part
+            will be read (and uploaded).
+    """
+
+    def __init__(self, file_path: Union[str, Path], size_limit: int) -> None:
+        self._file_path = Path(file_path)
+        self._file = self._file_path.open("rb")
+        self._size_limit = min(size_limit, os.fstat(self._file.fileno()).st_size)
+
+    def __del__(self) -> None:
+        self._file.close()
+        return super().__del__()
+
+    def __repr__(self) -> str:
+        return f"<PartialFileIO file_path={self._file_path} size_limit={self._size_limit}>"
+
+    def __len__(self) -> int:
+        return self._size_limit
+
+    def __getattribute__(self, name: str):
+        if name.startswith("_") or name in ("read", "tell", "seek"):  # only 3 public methods supported
+            return super().__getattribute__(name)
+        raise NotImplementedError(f"PartialFileIO does not support '{name}'.")
+
+    def tell(self) -> int:
+        """Return the current file position."""
+        return self._file.tell()
+
+    def seek(self, __offset: int, __whence: int = SEEK_SET) -> int:
+        """Change the stream position to the given offset.
+
+        Behavior is the same as a regular file, except that the position is capped to the size limit.
+        """
+        if __whence == SEEK_END:
+            # SEEK_END => set from the truncated end
+            __offset = len(self) + __offset
+            __whence = SEEK_SET
+
+        pos = self._file.seek(__offset, __whence)
+        if pos > self._size_limit:
+            return self._file.seek(self._size_limit)
+        return pos
+
+    def read(self, __size: Optional[int] = -1) -> bytes:
+        """Read at most `__size` bytes from the file.
+
+        Behavior is the same as a regular file, except that it is capped to the size limit.
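A hedged sketch of that capping behavior, importing from the internal module path laid out in this diff:

```python
import tempfile

from huggingface_hub._commit_scheduler import PartialFileIO

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"0123456789")

partial = PartialFileIO(tmp.name, size_limit=4)
assert len(partial) == 4          # capped to the snapshot size
assert partial.read() == b"0123"  # bytes appended after the snapshot are never read
assert partial.read() == b""      # already at the capped end
```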
+ """ + current = self._file.tell() + if __size is None or __size < 0: + # Read until file limit + truncated_size = self._size_limit - current + else: + # Read until file limit or __size + truncated_size = min(__size, self._size_limit - current) + return self._file.read(truncated_size) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_inference_endpoints.py b/venv/lib/python3.13/site-packages/huggingface_hub/_inference_endpoints.py new file mode 100644 index 0000000000000000000000000000000000000000..37f772bfbe28013ff5329d0a19a438706d50a19c --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_inference_endpoints.py @@ -0,0 +1,413 @@ +import time +from dataclasses import dataclass, field +from datetime import datetime +from enum import Enum +from typing import TYPE_CHECKING, Dict, Optional, Union + +from huggingface_hub.errors import InferenceEndpointError, InferenceEndpointTimeoutError + +from .utils import get_session, logging, parse_datetime + + +if TYPE_CHECKING: + from .hf_api import HfApi + from .inference._client import InferenceClient + from .inference._generated._async_client import AsyncInferenceClient + +logger = logging.get_logger(__name__) + + +class InferenceEndpointStatus(str, Enum): + PENDING = "pending" + INITIALIZING = "initializing" + UPDATING = "updating" + UPDATE_FAILED = "updateFailed" + RUNNING = "running" + PAUSED = "paused" + FAILED = "failed" + SCALED_TO_ZERO = "scaledToZero" + + +class InferenceEndpointType(str, Enum): + PUBlIC = "public" + PROTECTED = "protected" + PRIVATE = "private" + + +@dataclass +class InferenceEndpoint: + """ + Contains information about a deployed Inference Endpoint. + + Args: + name (`str`): + The unique name of the Inference Endpoint. + namespace (`str`): + The namespace where the Inference Endpoint is located. + repository (`str`): + The name of the model repository deployed on this Inference Endpoint. + status ([`InferenceEndpointStatus`]): + The current status of the Inference Endpoint. + url (`str`, *optional*): + The URL of the Inference Endpoint, if available. Only a deployed Inference Endpoint will have a URL. + framework (`str`): + The machine learning framework used for the model. + revision (`str`): + The specific model revision deployed on the Inference Endpoint. + task (`str`): + The task associated with the deployed model. + created_at (`datetime.datetime`): + The timestamp when the Inference Endpoint was created. + updated_at (`datetime.datetime`): + The timestamp of the last update of the Inference Endpoint. + type ([`InferenceEndpointType`]): + The type of the Inference Endpoint (public, protected, private). + raw (`Dict`): + The raw dictionary data returned from the API. + token (`str` or `bool`, *optional*): + Authentication token for the Inference Endpoint, if set when requesting the API. Will default to the + locally saved token if not provided. Pass `token=False` if you don't want to send your token to the server. + + Example: + ```python + >>> from huggingface_hub import get_inference_endpoint + >>> endpoint = get_inference_endpoint("my-text-to-image") + >>> endpoint + InferenceEndpoint(name='my-text-to-image', ...) + + # Get status + >>> endpoint.status + 'running' + >>> endpoint.url + 'https://my-text-to-image.region.vendor.endpoints.huggingface.cloud' + + # Run inference + >>> endpoint.client.text_to_image(...) + + # Pause endpoint to save $$$ + >>> endpoint.pause() + + # ... 
+ # Resume and wait for deployment + >>> endpoint.resume() + >>> endpoint.wait() + >>> endpoint.client.text_to_image(...) + ``` + """ + + # Field in __repr__ + name: str = field(init=False) + namespace: str + repository: str = field(init=False) + status: InferenceEndpointStatus = field(init=False) + health_route: str = field(init=False) + url: Optional[str] = field(init=False) + + # Other fields + framework: str = field(repr=False, init=False) + revision: str = field(repr=False, init=False) + task: str = field(repr=False, init=False) + created_at: datetime = field(repr=False, init=False) + updated_at: datetime = field(repr=False, init=False) + type: InferenceEndpointType = field(repr=False, init=False) + + # Raw dict from the API + raw: Dict = field(repr=False) + + # Internal fields + _token: Union[str, bool, None] = field(repr=False, compare=False) + _api: "HfApi" = field(repr=False, compare=False) + + @classmethod + def from_raw( + cls, raw: Dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None + ) -> "InferenceEndpoint": + """Initialize object from raw dictionary.""" + if api is None: + from .hf_api import HfApi + + api = HfApi() + if token is None: + token = api.token + + # All other fields are populated in __post_init__ + return cls(raw=raw, namespace=namespace, _token=token, _api=api) + + def __post_init__(self) -> None: + """Populate fields from raw dictionary.""" + self._populate_from_raw() + + @property + def client(self) -> "InferenceClient": + """Returns a client to make predictions on this Inference Endpoint. + + Returns: + [`InferenceClient`]: an inference client pointing to the deployed endpoint. + + Raises: + [`InferenceEndpointError`]: If the Inference Endpoint is not yet deployed. + """ + if self.url is None: + raise InferenceEndpointError( + "Cannot create a client for this Inference Endpoint as it is not yet deployed. " + "Please wait for the Inference Endpoint to be deployed using `endpoint.wait()` and try again." + ) + from .inference._client import InferenceClient + + return InferenceClient( + model=self.url, + token=self._token, # type: ignore[arg-type] # boolean token shouldn't be possible. In practice it's ok. + ) + + @property + def async_client(self) -> "AsyncInferenceClient": + """Returns a client to make predictions on this Inference Endpoint. + + Returns: + [`AsyncInferenceClient`]: an asyncio-compatible inference client pointing to the deployed endpoint. + + Raises: + [`InferenceEndpointError`]: If the Inference Endpoint is not yet deployed. + """ + if self.url is None: + raise InferenceEndpointError( + "Cannot create a client for this Inference Endpoint as it is not yet deployed. " + "Please wait for the Inference Endpoint to be deployed using `endpoint.wait()` and try again." + ) + from .inference._generated._async_client import AsyncInferenceClient + + return AsyncInferenceClient( + model=self.url, + token=self._token, # type: ignore[arg-type] # boolean token shouldn't be possible. In practice it's ok. + ) + + def wait(self, timeout: Optional[int] = None, refresh_every: int = 5) -> "InferenceEndpoint": + """Wait for the Inference Endpoint to be deployed. + + Information from the server will be fetched every 1s. If the Inference Endpoint is not deployed after `timeout` + seconds, a [`InferenceEndpointTimeoutError`] will be raised. The [`InferenceEndpoint`] will be mutated in place with the latest + data. 
+ + Args: + timeout (`int`, *optional*): + The maximum time to wait for the Inference Endpoint to be deployed, in seconds. If `None`, will wait + indefinitely. + refresh_every (`int`, *optional*): + The time to wait between each fetch of the Inference Endpoint status, in seconds. Defaults to 5s. + + Returns: + [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. + + Raises: + [`InferenceEndpointError`] + If the Inference Endpoint ended up in a failed state. + [`InferenceEndpointTimeoutError`] + If the Inference Endpoint is not deployed after `timeout` seconds. + """ + if timeout is not None and timeout < 0: + raise ValueError("`timeout` cannot be negative.") + if refresh_every <= 0: + raise ValueError("`refresh_every` must be positive.") + + start = time.time() + while True: + if self.status == InferenceEndpointStatus.FAILED: + raise InferenceEndpointError( + f"Inference Endpoint {self.name} failed to deploy. Please check the logs for more information." + ) + if self.status == InferenceEndpointStatus.UPDATE_FAILED: + raise InferenceEndpointError( + f"Inference Endpoint {self.name} failed to update. Please check the logs for more information." + ) + if self.status == InferenceEndpointStatus.RUNNING and self.url is not None: + # Verify the endpoint is actually reachable + _health_url = f"{self.url.rstrip('/')}/{self.health_route.lstrip('/')}" + response = get_session().get(_health_url, headers=self._api._build_hf_headers(token=self._token)) + if response.status_code == 200: + logger.info("Inference Endpoint is ready to be used.") + return self + + if timeout is not None: + if time.time() - start > timeout: + raise InferenceEndpointTimeoutError("Timeout while waiting for Inference Endpoint to be deployed.") + logger.info(f"Inference Endpoint is not deployed yet ({self.status}). Waiting {refresh_every}s...") + time.sleep(refresh_every) + self.fetch() + + def fetch(self) -> "InferenceEndpoint": + """Fetch latest information about the Inference Endpoint. + + Returns: + [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. + """ + obj = self._api.get_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] + self.raw = obj.raw + self._populate_from_raw() + return self + + def update( + self, + *, + # Compute update + accelerator: Optional[str] = None, + instance_size: Optional[str] = None, + instance_type: Optional[str] = None, + min_replica: Optional[int] = None, + max_replica: Optional[int] = None, + scale_to_zero_timeout: Optional[int] = None, + # Model update + repository: Optional[str] = None, + framework: Optional[str] = None, + revision: Optional[str] = None, + task: Optional[str] = None, + custom_image: Optional[Dict] = None, + secrets: Optional[Dict[str, str]] = None, + ) -> "InferenceEndpoint": + """Update the Inference Endpoint. + + This method allows the update of either the compute configuration, the deployed model, or both. All arguments are + optional but at least one must be provided. + + This is an alias for [`HfApi.update_inference_endpoint`]. The current object is mutated in place with the + latest data from the server. + + Args: + accelerator (`str`, *optional*): + The hardware accelerator to be used for inference (e.g. `"cpu"`). + instance_size (`str`, *optional*): + The size or type of the instance to be used for hosting the model (e.g. `"x4"`). 
+ instance_type (`str`, *optional*): + The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`). + min_replica (`int`, *optional*): + The minimum number of replicas (instances) to keep running for the Inference Endpoint. + max_replica (`int`, *optional*): + The maximum number of replicas (instances) to scale to for the Inference Endpoint. + scale_to_zero_timeout (`int`, *optional*): + The duration in minutes before an inactive endpoint is scaled to zero. + + repository (`str`, *optional*): + The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`). + framework (`str`, *optional*): + The machine learning framework used for the model (e.g. `"custom"`). + revision (`str`, *optional*): + The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`). + task (`str`, *optional*): + The task on which to deploy the model (e.g. `"text-classification"`). + custom_image (`Dict`, *optional*): + A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an + Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples). + secrets (`Dict[str, str]`, *optional*): + Secret values to inject in the container environment. + Returns: + [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. + """ + # Make API call + obj = self._api.update_inference_endpoint( + name=self.name, + namespace=self.namespace, + accelerator=accelerator, + instance_size=instance_size, + instance_type=instance_type, + min_replica=min_replica, + max_replica=max_replica, + scale_to_zero_timeout=scale_to_zero_timeout, + repository=repository, + framework=framework, + revision=revision, + task=task, + custom_image=custom_image, + secrets=secrets, + token=self._token, # type: ignore [arg-type] + ) + + # Mutate current object + self.raw = obj.raw + self._populate_from_raw() + return self + + def pause(self) -> "InferenceEndpoint": + """Pause the Inference Endpoint. + + A paused Inference Endpoint will not be charged. It can be resumed at any time using [`InferenceEndpoint.resume`]. + This is different than scaling the Inference Endpoint to zero with [`InferenceEndpoint.scale_to_zero`], which + would be automatically restarted when a request is made to it. + + This is an alias for [`HfApi.pause_inference_endpoint`]. The current object is mutated in place with the + latest data from the server. + + Returns: + [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. + """ + obj = self._api.pause_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] + self.raw = obj.raw + self._populate_from_raw() + return self + + def resume(self, running_ok: bool = True) -> "InferenceEndpoint": + """Resume the Inference Endpoint. + + This is an alias for [`HfApi.resume_inference_endpoint`]. The current object is mutated in place with the + latest data from the server. + + Args: + running_ok (`bool`, *optional*): + If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults to + `True`. + + Returns: + [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. 
+ """ + obj = self._api.resume_inference_endpoint( + name=self.name, namespace=self.namespace, running_ok=running_ok, token=self._token + ) # type: ignore [arg-type] + self.raw = obj.raw + self._populate_from_raw() + return self + + def scale_to_zero(self) -> "InferenceEndpoint": + """Scale Inference Endpoint to zero. + + An Inference Endpoint scaled to zero will not be charged. It will be resume on the next request to it, with a + cold start delay. This is different than pausing the Inference Endpoint with [`InferenceEndpoint.pause`], which + would require a manual resume with [`InferenceEndpoint.resume`]. + + This is an alias for [`HfApi.scale_to_zero_inference_endpoint`]. The current object is mutated in place with the + latest data from the server. + + Returns: + [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. + """ + obj = self._api.scale_to_zero_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] + self.raw = obj.raw + self._populate_from_raw() + return self + + def delete(self) -> None: + """Delete the Inference Endpoint. + + This operation is not reversible. If you don't want to be charged for an Inference Endpoint, it is preferable + to pause it with [`InferenceEndpoint.pause`] or scale it to zero with [`InferenceEndpoint.scale_to_zero`]. + + This is an alias for [`HfApi.delete_inference_endpoint`]. + """ + self._api.delete_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] + + def _populate_from_raw(self) -> None: + """Populate fields from raw dictionary. + + Called in __post_init__ + each time the Inference Endpoint is updated. + """ + # Repr fields + self.name = self.raw["name"] + self.repository = self.raw["model"]["repository"] + self.status = self.raw["status"]["state"] + self.url = self.raw["status"].get("url") + self.health_route = self.raw["healthRoute"] + + # Other fields + self.framework = self.raw["model"]["framework"] + self.revision = self.raw["model"]["revision"] + self.task = self.raw["model"]["task"] + self.created_at = parse_datetime(self.raw["status"]["createdAt"]) + self.updated_at = parse_datetime(self.raw["status"]["updatedAt"]) + self.type = self.raw["type"] diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_jobs_api.py b/venv/lib/python3.13/site-packages/huggingface_hub/_jobs_api.py new file mode 100644 index 0000000000000000000000000000000000000000..00177a008c177d5484fb9069d0684bbf416e9289 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_jobs_api.py @@ -0,0 +1,301 @@ +# coding=utf-8 +# Copyright 2025-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
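Stepping back from the endpoint helpers defined above, a hedged lifecycle sketch (endpoint name illustrative, valid token assumed):

```python
from huggingface_hub import get_inference_endpoint

endpoint = get_inference_endpoint("my-text-to-image")
endpoint.pause()                   # stops billing; requires a manual resume
endpoint.resume(running_ok=True)   # no error if it is already running
endpoint.wait(timeout=300)         # poll until RUNNING and the health route answers
endpoint.scale_to_zero()           # not billed; restarts automatically on the next request
```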
+from dataclasses import dataclass
+from datetime import datetime
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from huggingface_hub import constants
+from huggingface_hub._space_api import SpaceHardware
+from huggingface_hub.utils._datetime import parse_datetime
+
+
+class JobStage(str, Enum):
+    """
+    Enumeration of the possible stages of a Job on the Hub.
+
+    Value can be compared to a string:
+    ```py
+    assert JobStage.COMPLETED == "COMPLETED"
+    ```
+    Possible values are: `COMPLETED`, `CANCELED`, `ERROR`, `DELETED`, `RUNNING`.
+    Taken from https://github.com/huggingface/moon-landing/blob/main/server/job_types/JobInfo.ts#L61 (private url).
+    """
+
+    # Copied from moon-landing > server > lib > Job.ts
+    COMPLETED = "COMPLETED"
+    CANCELED = "CANCELED"
+    ERROR = "ERROR"
+    DELETED = "DELETED"
+    RUNNING = "RUNNING"
+
+
+@dataclass
+class JobStatus:
+    stage: JobStage
+    message: Optional[str]
+
+
+@dataclass
+class JobOwner:
+    id: str
+    name: str
+    type: str
+
+
+@dataclass
+class JobInfo:
+    """
+    Contains information about a Job.
+
+    Args:
+        id (`str`):
+            Job ID.
+        created_at (`datetime` or `None`):
+            When the Job was created.
+        docker_image (`str` or `None`):
+            The Docker image from Docker Hub used for the Job.
+            Can be None if space_id is present instead.
+        space_id (`str` or `None`):
+            The Docker image from Hugging Face Spaces used for the Job.
+            Can be None if docker_image is present instead.
+        command (`List[str]` or `None`):
+            Command of the Job, e.g. `["python", "-c", "print('hello world')"]`
+        arguments (`List[str]` or `None`):
+            Arguments passed to the command
+        environment (`Dict[str, Any]` or `None`):
+            Environment variables of the Job as a dictionary.
+        secrets (`Dict[str, Any]` or `None`):
+            Secret environment variables of the Job (encrypted).
+        flavor (`str` or `None`):
+            Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
+            E.g. `"cpu-basic"`.
+        status (`JobStatus` or `None`):
+            Status of the Job, e.g. `JobStatus(stage="RUNNING", message=None)`
+            See [`JobStage`] for possible stage values.
+        owner (`JobOwner` or `None`):
+            Owner of the Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
+
+    Example:
+
+    ```python
+    >>> from huggingface_hub import run_job
+    >>> job = run_job(
+    ...     image="python:3.12",
+    ...     command=["python", "-c", "print('Hello from the cloud!')"]
+    ...
) + >>> job + JobInfo(id='687fb701029421ae5549d998', created_at=datetime.datetime(2025, 7, 22, 16, 6, 25, 79000, tzinfo=datetime.timezone.utc), docker_image='python:3.12', space_id=None, command=['python', '-c', "print('Hello from the cloud!')"], arguments=[], environment={}, secrets={}, flavor='cpu-basic', status=JobStatus(stage='RUNNING', message=None), owner=JobOwner(id='5e9ecfc04957053f60648a3e', name='lhoestq', type='user'), endpoint='https://huggingface.co', url='https://huggingface.co/jobs/lhoestq/687fb701029421ae5549d998') + >>> job.id + '687fb701029421ae5549d998' + >>> job.url + 'https://huggingface.co/jobs/lhoestq/687fb701029421ae5549d998' + >>> job.status.stage + 'RUNNING' + ``` + """ + + id: str + created_at: Optional[datetime] + docker_image: Optional[str] + space_id: Optional[str] + command: Optional[List[str]] + arguments: Optional[List[str]] + environment: Optional[Dict[str, Any]] + secrets: Optional[Dict[str, Any]] + flavor: Optional[SpaceHardware] + status: JobStatus + owner: JobOwner + + # Inferred fields + endpoint: str + url: str + + def __init__(self, **kwargs) -> None: + self.id = kwargs["id"] + created_at = kwargs.get("createdAt") or kwargs.get("created_at") + self.created_at = parse_datetime(created_at) if created_at else None + self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image") + self.space_id = kwargs.get("spaceId") or kwargs.get("space_id") + owner = kwargs.get("owner", {}) + self.owner = JobOwner(id=owner["id"], name=owner["name"], type=owner["type"]) + self.command = kwargs.get("command") + self.arguments = kwargs.get("arguments") + self.environment = kwargs.get("environment") + self.secrets = kwargs.get("secrets") + self.flavor = kwargs.get("flavor") + status = kwargs.get("status", {}) + self.status = JobStatus(stage=status["stage"], message=status.get("message")) + + # Inferred fields + self.endpoint = kwargs.get("endpoint", constants.ENDPOINT) + self.url = f"{self.endpoint}/jobs/{self.owner.name}/{self.id}" + + +@dataclass +class JobSpec: + docker_image: Optional[str] + space_id: Optional[str] + command: Optional[List[str]] + arguments: Optional[List[str]] + environment: Optional[Dict[str, Any]] + secrets: Optional[Dict[str, Any]] + flavor: Optional[SpaceHardware] + timeout: Optional[int] + tags: Optional[List[str]] + arch: Optional[str] + + def __init__(self, **kwargs) -> None: + self.docker_image = kwargs.get("dockerImage") or kwargs.get("docker_image") + self.space_id = kwargs.get("spaceId") or kwargs.get("space_id") + self.command = kwargs.get("command") + self.arguments = kwargs.get("arguments") + self.environment = kwargs.get("environment") + self.secrets = kwargs.get("secrets") + self.flavor = kwargs.get("flavor") + self.timeout = kwargs.get("timeout") + self.tags = kwargs.get("tags") + self.arch = kwargs.get("arch") + + +@dataclass +class LastJobInfo: + id: str + at: datetime + + def __init__(self, **kwargs) -> None: + self.id = kwargs["id"] + self.at = parse_datetime(kwargs["at"]) + + +@dataclass +class ScheduledJobStatus: + last_job: Optional[LastJobInfo] + next_job_run_at: Optional[datetime] + + def __init__(self, **kwargs) -> None: + last_job = kwargs.get("lastJob") or kwargs.get("last_job") + self.last_job = LastJobInfo(**last_job) if last_job else None + next_job_run_at = kwargs.get("nextJobRunAt") or kwargs.get("next_job_run_at") + self.next_job_run_at = parse_datetime(str(next_job_run_at)) if next_job_run_at else None + + +@dataclass +class ScheduledJobInfo: + """ + Contains information about a Job. 
+
+    Args:
+        id (`str`):
+            Scheduled Job ID.
+        created_at (`datetime` or `None`):
+            When the scheduled Job was created.
+        tags (`List[str]` or `None`):
+            The tags of the scheduled Job.
+        schedule (`str` or `None`):
+            One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
+            CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
+        suspend (`bool` or `None`):
+            Whether the scheduled job is suspended (paused).
+        concurrency (`bool` or `None`):
+            Whether multiple instances of this Job can run concurrently.
+        status (`ScheduledJobStatus` or `None`):
+            Status of the scheduled Job.
+        owner (`JobOwner` or `None`):
+            Owner of the scheduled Job, e.g. `JobOwner(id="5e9ecfc04957053f60648a3e", name="lhoestq", type="user")`
+        job_spec (`JobSpec` or `None`):
+            Specifications of the Job.
+
+    Example:
+
+    ```python
+    >>> from huggingface_hub import create_scheduled_job
+    >>> scheduled_job = create_scheduled_job(
+    ...     image="python:3.12",
+    ...     command=["python", "-c", "print('Hello from the cloud!')"],
+    ...     schedule="@hourly",
+    ... )
+    >>> scheduled_job.id
+    '687fb701029421ae5549d999'
+    >>> scheduled_job.status.next_job_run_at
+    datetime.datetime(2025, 7, 22, 17, 6, 25, 79000, tzinfo=datetime.timezone.utc)
+    ```
+    """
+
+    id: str
+    created_at: Optional[datetime]
+    job_spec: JobSpec
+    schedule: Optional[str]
+    suspend: Optional[bool]
+    concurrency: Optional[bool]
+    status: ScheduledJobStatus
+    owner: JobOwner
+
+    def __init__(self, **kwargs) -> None:
+        self.id = kwargs["id"]
+        created_at = kwargs.get("createdAt") or kwargs.get("created_at")
+        self.created_at = parse_datetime(created_at) if created_at else None
+        self.job_spec = JobSpec(**(kwargs.get("job_spec") or kwargs.get("jobSpec", {})))
+        self.schedule = kwargs.get("schedule")
+        self.suspend = kwargs.get("suspend")
+        self.concurrency = kwargs.get("concurrency")
+        status = kwargs.get("status", {})
+        self.status = ScheduledJobStatus(
+            last_job=status.get("last_job") or status.get("lastJob"),
+            next_job_run_at=status.get("next_job_run_at") or status.get("nextJobRunAt"),
+        )
+        owner = kwargs.get("owner", {})
+        self.owner = JobOwner(id=owner["id"], name=owner["name"], type=owner["type"])
+
+
+def _create_job_spec(
+    *,
+    image: str,
+    command: List[str],
+    env: Optional[Dict[str, Any]],
+    secrets: Optional[Dict[str, Any]],
+    flavor: Optional[SpaceHardware],
+    timeout: Optional[Union[int, float, str]],
+) -> Dict[str, Any]:
+    # prepare job spec to send to HF Jobs API
+    job_spec: Dict[str, Any] = {
+        "command": command,
+        "arguments": [],
+        "environment": env or {},
+        "flavor": flavor or SpaceHardware.CPU_BASIC,
+    }
+    # secrets are optional
+    if secrets:
+        job_spec["secrets"] = secrets
+    # timeout is optional
+    if timeout:
+        time_units_factors = {"s": 1, "m": 60, "h": 3600, "d": 3600 * 24}
+        if isinstance(timeout, str) and timeout[-1] in time_units_factors:
+            job_spec["timeoutSeconds"] = int(float(timeout[:-1]) * time_units_factors[timeout[-1]])
+        else:
+            job_spec["timeoutSeconds"] = int(timeout)
+    # input is either from docker hub or from HF spaces
+    for prefix in (
+        "https://huggingface.co/spaces/",
+        "https://hf.co/spaces/",
+        "huggingface.co/spaces/",
+        "hf.co/spaces/",
+    ):
+        if image.startswith(prefix):
+            job_spec["spaceId"] = image[len(prefix) :]
+            break
+    else:
+        job_spec["dockerImage"] = image
+    return job_spec
diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_local_folder.py b/venv/lib/python3.13/site-packages/huggingface_hub/_local_folder.py
new file mode 100644
index 0000000000000000000000000000000000000000..37f6c32a760ecf03794c129735fe2e15516952d1
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/huggingface_hub/_local_folder.py
@@ -0,0 +1,447 @@
+# coding=utf-8
+# Copyright 2024-present, the HuggingFace Inc. team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains utilities to handle the `../.cache/huggingface` folder in local directories.
+
+First discussed in https://github.com/huggingface/huggingface_hub/issues/1738 to store
+download metadata when downloading files from the hub to a local directory (without
+using the cache).
+
+./.cache/huggingface folder structure:
+[4.0K]  data
+├── [4.0K]  .cache
+│   └── [4.0K]  huggingface
+│       └── [4.0K]  download
+│           ├── [  16]  file.parquet.metadata
+│           ├── [  16]  file.txt.metadata
+│           └── [4.0K]  folder
+│               └── [  16]  file.parquet.metadata
+│
+├── [6.5G]  file.parquet
+├── [1.5K]  file.txt
+└── [4.0K]  folder
+    └── [  16]  file.parquet
+
+
+Download metadata file structure:
+```
+# file.txt.metadata
+11c5a3d5811f50298f278a704980280950aedb10
+a16a55fda99d2f2e7b69cce5cf93ff4ad3049930
+1712656091.123
+
+# file.parquet.metadata
+11c5a3d5811f50298f278a704980280950aedb10
+7c5d3f4b8b76583b422fcb9189ad6c89d5d97a094541ce8932dce3ecabde1421
+1712656091.123
+```
+"""
+
+import base64
+import hashlib
+import logging
+import os
+import time
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Optional
+
+from .utils import WeakFileLock
+
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class LocalDownloadFilePaths:
+    """
+    Paths to the files related to a download process in a local dir.
+
+    Returned by [`get_local_download_paths`].
+
+    Attributes:
+        file_path (`Path`):
+            Path where the file will be saved.
+        lock_path (`Path`):
+            Path to the lock file used to ensure atomicity when reading/writing metadata.
+        metadata_path (`Path`):
+            Path to the metadata file.
+    """
+
+    file_path: Path
+    lock_path: Path
+    metadata_path: Path
+
+    def incomplete_path(self, etag: str) -> Path:
+        """Return the path where a file will be temporarily downloaded before being moved to `file_path`."""
+        path = self.metadata_path.parent / f"{_short_hash(self.metadata_path.name)}.{etag}.incomplete"
+        resolved_path = str(path.resolve())
+        # Some Windows versions do not allow for paths longer than 255 characters.
+        # In this case, we must specify it as an extended path by using the "\\?\" prefix.
+        if os.name == "nt" and len(resolved_path) > 255 and not resolved_path.startswith("\\\\?\\"):
+            path = Path("\\\\?\\" + resolved_path)
+        return path
+
+
+@dataclass(frozen=True)
+class LocalUploadFilePaths:
+    """
+    Paths to the files related to an upload process in a local dir.
+
+    Returned by [`get_local_upload_paths`].
+
+    Attributes:
+        path_in_repo (`str`):
+            Path of the file in the repo.
+        file_path (`Path`):
+            Path where the file will be saved.
+        lock_path (`Path`):
+            Path to the lock file used to ensure atomicity when reading/writing metadata.
+        metadata_path (`Path`):
+            Path to the metadata file.
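+
+    Example (a minimal sketch; the directory and filename below are hypothetical):
+    ```py
+    >>> from pathlib import Path
+    >>> from huggingface_hub._local_folder import get_local_upload_paths
+    >>> paths = get_local_upload_paths(Path("my_dataset"), "data/train.parquet")
+    >>> paths.metadata_path  # metadata lives under .cache/huggingface/upload/
+    PosixPath('my_dataset/.cache/huggingface/upload/data/train.parquet.metadata')
+    ```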
+ """ + + path_in_repo: str + file_path: Path + lock_path: Path + metadata_path: Path + + +@dataclass +class LocalDownloadFileMetadata: + """ + Metadata about a file in the local directory related to a download process. + + Attributes: + filename (`str`): + Path of the file in the repo. + commit_hash (`str`): + Commit hash of the file in the repo. + etag (`str`): + ETag of the file in the repo. Used to check if the file has changed. + For LFS files, this is the sha256 of the file. For regular files, it corresponds to the git hash. + timestamp (`int`): + Unix timestamp of when the metadata was saved i.e. when the metadata was accurate. + """ + + filename: str + commit_hash: str + etag: str + timestamp: float + + +@dataclass +class LocalUploadFileMetadata: + """ + Metadata about a file in the local directory related to an upload process. + """ + + size: int + + # Default values correspond to "we don't know yet" + timestamp: Optional[float] = None + should_ignore: Optional[bool] = None + sha256: Optional[str] = None + upload_mode: Optional[str] = None + remote_oid: Optional[str] = None + is_uploaded: bool = False + is_committed: bool = False + + def save(self, paths: LocalUploadFilePaths) -> None: + """Save the metadata to disk.""" + with WeakFileLock(paths.lock_path): + with paths.metadata_path.open("w") as f: + new_timestamp = time.time() + f.write(str(new_timestamp) + "\n") + + f.write(str(self.size)) # never None + f.write("\n") + + if self.should_ignore is not None: + f.write(str(int(self.should_ignore))) + f.write("\n") + + if self.sha256 is not None: + f.write(self.sha256) + f.write("\n") + + if self.upload_mode is not None: + f.write(self.upload_mode) + f.write("\n") + + if self.remote_oid is not None: + f.write(self.remote_oid) + f.write("\n") + + f.write(str(int(self.is_uploaded)) + "\n") + f.write(str(int(self.is_committed)) + "\n") + + self.timestamp = new_timestamp + + +def get_local_download_paths(local_dir: Path, filename: str) -> LocalDownloadFilePaths: + """Compute paths to the files related to a download process. + + Folders containing the paths are all guaranteed to exist. + + Args: + local_dir (`Path`): + Path to the local directory in which files are downloaded. + filename (`str`): + Path of the file in the repo. + + Return: + [`LocalDownloadFilePaths`]: the paths to the files (file_path, lock_path, metadata_path, incomplete_path). + """ + # filename is the path in the Hub repository (separated by '/') + # make sure to have a cross platform transcription + sanitized_filename = os.path.join(*filename.split("/")) + if os.name == "nt": + if sanitized_filename.startswith("..\\") or "\\..\\" in sanitized_filename: + raise ValueError( + f"Invalid filename: cannot handle filename '{sanitized_filename}' on Windows. Please ask the repository" + " owner to rename this file." + ) + file_path = local_dir / sanitized_filename + metadata_path = _huggingface_dir(local_dir) / "download" / f"{sanitized_filename}.metadata" + lock_path = metadata_path.with_suffix(".lock") + + # Some Windows versions do not allow for paths longer than 255 characters. 
+ # In this case, we must specify it as an extended path by using the "\\?\" prefix + if os.name == "nt": + if not str(local_dir).startswith("\\\\?\\") and len(os.path.abspath(lock_path)) > 255: + file_path = Path("\\\\?\\" + os.path.abspath(file_path)) + lock_path = Path("\\\\?\\" + os.path.abspath(lock_path)) + metadata_path = Path("\\\\?\\" + os.path.abspath(metadata_path)) + + file_path.parent.mkdir(parents=True, exist_ok=True) + metadata_path.parent.mkdir(parents=True, exist_ok=True) + return LocalDownloadFilePaths(file_path=file_path, lock_path=lock_path, metadata_path=metadata_path) + + +def get_local_upload_paths(local_dir: Path, filename: str) -> LocalUploadFilePaths: + """Compute paths to the files related to an upload process. + + Folders containing the paths are all guaranteed to exist. + + Args: + local_dir (`Path`): + Path to the local directory that is uploaded. + filename (`str`): + Path of the file in the repo. + + Return: + [`LocalUploadFilePaths`]: the paths to the files (file_path, lock_path, metadata_path). + """ + # filename is the path in the Hub repository (separated by '/') + # make sure to have a cross platform transcription + sanitized_filename = os.path.join(*filename.split("/")) + if os.name == "nt": + if sanitized_filename.startswith("..\\") or "\\..\\" in sanitized_filename: + raise ValueError( + f"Invalid filename: cannot handle filename '{sanitized_filename}' on Windows. Please ask the repository" + " owner to rename this file." + ) + file_path = local_dir / sanitized_filename + metadata_path = _huggingface_dir(local_dir) / "upload" / f"{sanitized_filename}.metadata" + lock_path = metadata_path.with_suffix(".lock") + + # Some Windows versions do not allow for paths longer than 255 characters. + # In this case, we must specify it as an extended path by using the "\\?\" prefix + if os.name == "nt": + if not str(local_dir).startswith("\\\\?\\") and len(os.path.abspath(lock_path)) > 255: + file_path = Path("\\\\?\\" + os.path.abspath(file_path)) + lock_path = Path("\\\\?\\" + os.path.abspath(lock_path)) + metadata_path = Path("\\\\?\\" + os.path.abspath(metadata_path)) + + file_path.parent.mkdir(parents=True, exist_ok=True) + metadata_path.parent.mkdir(parents=True, exist_ok=True) + return LocalUploadFilePaths( + path_in_repo=filename, file_path=file_path, lock_path=lock_path, metadata_path=metadata_path + ) + + +def read_download_metadata(local_dir: Path, filename: str) -> Optional[LocalDownloadFileMetadata]: + """Read metadata about a file in the local directory related to a download process. + + Args: + local_dir (`Path`): + Path to the local directory in which files are downloaded. + filename (`str`): + Path of the file in the repo. + + Return: + `[LocalDownloadFileMetadata]` or `None`: the metadata if it exists, `None` otherwise. + """ + paths = get_local_download_paths(local_dir, filename) + with WeakFileLock(paths.lock_path): + if paths.metadata_path.exists(): + try: + with paths.metadata_path.open() as f: + commit_hash = f.readline().strip() + etag = f.readline().strip() + timestamp = float(f.readline().strip()) + metadata = LocalDownloadFileMetadata( + filename=filename, + commit_hash=commit_hash, + etag=etag, + timestamp=timestamp, + ) + except Exception as e: + # remove the metadata file if it is corrupted / not the right format + logger.warning( + f"Invalid metadata file {paths.metadata_path}: {e}. Removing it from disk and continue." 
+ ) + try: + paths.metadata_path.unlink() + except Exception as e: + logger.warning(f"Could not remove corrupted metadata file {paths.metadata_path}: {e}") + + try: + # check if the file exists and hasn't been modified since the metadata was saved + stat = paths.file_path.stat() + if ( + stat.st_mtime - 1 <= metadata.timestamp + ): # allow 1s difference as stat.st_mtime might not be precise + return metadata + logger.info(f"Ignored metadata for '{filename}' (outdated). Will re-compute hash.") + except FileNotFoundError: + # file does not exist => metadata is outdated + return None + return None + + +def read_upload_metadata(local_dir: Path, filename: str) -> LocalUploadFileMetadata: + """Read metadata about a file in the local directory related to an upload process. + + TODO: factorize logic with `read_download_metadata`. + + Args: + local_dir (`Path`): + Path to the local directory in which files are downloaded. + filename (`str`): + Path of the file in the repo. + + Return: + `[LocalUploadFileMetadata]` or `None`: the metadata if it exists, `None` otherwise. + """ + paths = get_local_upload_paths(local_dir, filename) + with WeakFileLock(paths.lock_path): + if paths.metadata_path.exists(): + try: + with paths.metadata_path.open() as f: + timestamp = float(f.readline().strip()) + + size = int(f.readline().strip()) # never None + + _should_ignore = f.readline().strip() + should_ignore = None if _should_ignore == "" else bool(int(_should_ignore)) + + _sha256 = f.readline().strip() + sha256 = None if _sha256 == "" else _sha256 + + _upload_mode = f.readline().strip() + upload_mode = None if _upload_mode == "" else _upload_mode + if upload_mode not in (None, "regular", "lfs"): + raise ValueError(f"Invalid upload mode in metadata {paths.path_in_repo}: {upload_mode}") + + _remote_oid = f.readline().strip() + remote_oid = None if _remote_oid == "" else _remote_oid + + is_uploaded = bool(int(f.readline().strip())) + is_committed = bool(int(f.readline().strip())) + + metadata = LocalUploadFileMetadata( + timestamp=timestamp, + size=size, + should_ignore=should_ignore, + sha256=sha256, + upload_mode=upload_mode, + remote_oid=remote_oid, + is_uploaded=is_uploaded, + is_committed=is_committed, + ) + except Exception as e: + # remove the metadata file if it is corrupted / not the right format + logger.warning( + f"Invalid metadata file {paths.metadata_path}: {e}. Removing it from disk and continue." + ) + try: + paths.metadata_path.unlink() + except Exception as e: + logger.warning(f"Could not remove corrupted metadata file {paths.metadata_path}: {e}") + + # TODO: can we do better? + if ( + metadata.timestamp is not None + and metadata.is_uploaded # file was uploaded + and not metadata.is_committed # but not committed + and time.time() - metadata.timestamp > 20 * 3600 # and it's been more than 20 hours + ): # => we consider it as garbage-collected by S3 + metadata.is_uploaded = False + + # check if the file exists and hasn't been modified since the metadata was saved + try: + if metadata.timestamp is not None and paths.file_path.stat().st_mtime <= metadata.timestamp: + return metadata + logger.info(f"Ignored metadata for '{filename}' (outdated). 
Will re-compute hash.") + except FileNotFoundError: + # file does not exist => metadata is outdated + pass + + # empty metadata => we don't know anything expect its size + return LocalUploadFileMetadata(size=paths.file_path.stat().st_size) + + +def write_download_metadata(local_dir: Path, filename: str, commit_hash: str, etag: str) -> None: + """Write metadata about a file in the local directory related to a download process. + + Args: + local_dir (`Path`): + Path to the local directory in which files are downloaded. + """ + paths = get_local_download_paths(local_dir, filename) + with WeakFileLock(paths.lock_path): + with paths.metadata_path.open("w") as f: + f.write(f"{commit_hash}\n{etag}\n{time.time()}\n") + + +def _huggingface_dir(local_dir: Path) -> Path: + """Return the path to the `.cache/huggingface` directory in a local directory.""" + # Wrap in lru_cache to avoid overwriting the .gitignore file if called multiple times + path = local_dir / ".cache" / "huggingface" + path.mkdir(exist_ok=True, parents=True) + + # Create a .gitignore file in the .cache/huggingface directory if it doesn't exist + # Should be thread-safe enough like this. + gitignore = path / ".gitignore" + gitignore_lock = path / ".gitignore.lock" + if not gitignore.exists(): + try: + with WeakFileLock(gitignore_lock, timeout=0.1): + gitignore.write_text("*") + except IndexError: + pass + except OSError: # TimeoutError, FileNotFoundError, PermissionError, etc. + pass + try: + gitignore_lock.unlink() + except OSError: + pass + return path + + +def _short_hash(filename: str) -> str: + return base64.urlsafe_b64encode(hashlib.sha1(filename.encode()).digest()).decode() diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_login.py b/venv/lib/python3.13/site-packages/huggingface_hub/_login.py new file mode 100644 index 0000000000000000000000000000000000000000..8f721b68348fc3abeb2f90b6a756cb125ce19571 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_login.py @@ -0,0 +1,514 @@ +# Copyright 2020 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains methods to log in to the Hub.""" + +import os +import subprocess +from getpass import getpass +from pathlib import Path +from typing import Optional + +from . 
import constants +from .commands._cli_utils import ANSI +from .utils import ( + capture_output, + get_token, + is_google_colab, + is_notebook, + list_credential_helpers, + logging, + run_subprocess, + set_git_credential, + unset_git_credential, +) +from .utils._auth import ( + _get_token_by_name, + _get_token_from_environment, + _get_token_from_file, + _get_token_from_google_colab, + _save_stored_tokens, + _save_token, + get_stored_tokens, +) +from .utils._deprecation import _deprecate_arguments, _deprecate_positional_args + + +logger = logging.get_logger(__name__) + +_HF_LOGO_ASCII = """ + _| _| _| _| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _|_|_|_| _|_| _|_|_| _|_|_|_| + _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _| + _|_|_|_| _| _| _| _|_| _| _|_| _| _| _| _| _| _|_| _|_|_| _|_|_|_| _| _|_|_| + _| _| _| _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _| + _| _| _|_| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _| _| _| _|_|_| _|_|_|_| +""" + + +@_deprecate_arguments( + version="1.0", + deprecated_args="write_permission", + custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.", +) +@_deprecate_positional_args(version="1.0") +def login( + token: Optional[str] = None, + *, + add_to_git_credential: bool = False, + new_session: bool = True, + write_permission: bool = False, +) -> None: + """Login the machine to access the Hub. + + The `token` is persisted in cache and set as a git credential. Once done, the machine + is logged in and the access token will be available across all `huggingface_hub` + components. If `token` is not provided, it will be prompted to the user either with + a widget (in a notebook) or via the terminal. + + To log in from outside of a script, one can also use `hf auth login` which is + a cli command that wraps [`login`]. + + > [!TIP] + > [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and + > extends its capabilities. + + > [!TIP] + > When the token is not passed, [`login`] will automatically detect if the script runs + > in a notebook or not. However, this detection might not be accurate due to the + > variety of notebooks that exists nowadays. If that is the case, you can always force + > the UI by using [`notebook_login`] or [`interpreter_login`]. + + Args: + token (`str`, *optional*): + User access token to generate from https://huggingface.co/settings/token. + add_to_git_credential (`bool`, defaults to `False`): + If `True`, token will be set as git credential. If no git credential helper + is configured, a warning will be displayed to the user. If `token` is `None`, + the value of `add_to_git_credential` is ignored and will be prompted again + to the end user. + new_session (`bool`, defaults to `True`): + If `True`, will request a token even if one is already saved on the machine. + write_permission (`bool`): + Ignored and deprecated argument. + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If an organization token is passed. Only personal account tokens are valid + to log in. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If token is invalid. + [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError) + If running in a notebook but `ipywidgets` is not installed. + """ + if token is not None: + if not add_to_git_credential: + logger.info( + "The token has not been saved to the git credentials helper. 
Pass " + "`add_to_git_credential=True` in this function directly or " + "`--add-to-git-credential` if using via `hf`CLI if " + "you want to set the git credential as well." + ) + _login(token, add_to_git_credential=add_to_git_credential) + elif is_notebook(): + notebook_login(new_session=new_session) + else: + interpreter_login(new_session=new_session) + + +def logout(token_name: Optional[str] = None) -> None: + """Logout the machine from the Hub. + + Token is deleted from the machine and removed from git credential. + + Args: + token_name (`str`, *optional*): + Name of the access token to logout from. If `None`, will logout from all saved access tokens. + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError): + If the access token name is not found. + """ + if get_token() is None and not get_stored_tokens(): # No active token and no saved access tokens + logger.warning("Not logged in!") + return + if not token_name: + # Delete all saved access tokens and token + for file_path in (constants.HF_TOKEN_PATH, constants.HF_STORED_TOKENS_PATH): + try: + Path(file_path).unlink() + except FileNotFoundError: + pass + logger.info("Successfully logged out from all access tokens.") + else: + _logout_from_token(token_name) + logger.info(f"Successfully logged out from access token: {token_name}.") + + unset_git_credential() + + # Check if still logged in + if _get_token_from_google_colab() is not None: + raise EnvironmentError( + "You are automatically logged in using a Google Colab secret.\n" + "To log out, you must unset the `HF_TOKEN` secret in your Colab settings." + ) + if _get_token_from_environment() is not None: + raise EnvironmentError( + "Token has been deleted from your machine but you are still logged in.\n" + "To log out, you must clear out both `HF_TOKEN` and `HUGGING_FACE_HUB_TOKEN` environment variables." + ) + + +def auth_switch(token_name: str, add_to_git_credential: bool = False) -> None: + """Switch to a different access token. + + Args: + token_name (`str`): + Name of the access token to switch to. + add_to_git_credential (`bool`, defaults to `False`): + If `True`, token will be set as git credential. If no git credential helper + is configured, a warning will be displayed to the user. If `token` is `None`, + the value of `add_to_git_credential` is ignored and will be prompted again + to the end user. + + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError): + If the access token name is not found. + """ + token = _get_token_by_name(token_name) + if not token: + raise ValueError(f"Access token {token_name} not found in {constants.HF_STORED_TOKENS_PATH}") + # Write token to HF_TOKEN_PATH + _set_active_token(token_name, add_to_git_credential) + logger.info(f"The current active token is: {token_name}") + token_from_environment = _get_token_from_environment() + if token_from_environment is not None and token_from_environment != token: + logger.warning( + "The environment variable `HF_TOKEN` is set and will override the access token you've just switched to." 
+ ) + + +def auth_list() -> None: + """List all stored access tokens.""" + tokens = get_stored_tokens() + + if not tokens: + logger.info("No access tokens found.") + return + # Find current token + current_token = get_token() + current_token_name = None + for token_name in tokens: + if tokens.get(token_name) == current_token: + current_token_name = token_name + # Print header + max_offset = max(len("token"), max(len(token) for token in tokens)) + 2 + print(f" {{:<{max_offset}}}| {{:<15}}".format("name", "token")) + print("-" * (max_offset + 2) + "|" + "-" * 15) + + # Print saved access tokens + for token_name in tokens: + token = tokens.get(token_name, "") + masked_token = f"{token[:3]}****{token[-4:]}" if token != "" else token + is_current = "*" if token == current_token else " " + + print(f"{is_current} {{:<{max_offset}}}| {{:<15}}".format(token_name, masked_token)) + + if _get_token_from_environment(): + logger.warning( + "\nNote: Environment variable `HF_TOKEN` is set and is the current active token independently from the stored tokens listed above." + ) + elif current_token_name is None: + logger.warning( + "\nNote: No active token is set and no environment variable `HF_TOKEN` is found. Use `hf auth login` to log in." + ) + + +### +# Interpreter-based login (text) +### + + +@_deprecate_arguments( + version="1.0", + deprecated_args="write_permission", + custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.", +) +@_deprecate_positional_args(version="1.0") +def interpreter_login(*, new_session: bool = True, write_permission: bool = False) -> None: + """ + Displays a prompt to log in to the HF website and store the token. + + This is equivalent to [`login`] without passing a token when not run in a notebook. + [`interpreter_login`] is useful if you want to force the use of the terminal prompt + instead of a notebook widget. + + For more details, see [`login`]. + + Args: + new_session (`bool`, defaults to `True`): + If `True`, will request a token even if one is already saved on the machine. + write_permission (`bool`): + Ignored and deprecated argument. + """ + if not new_session and get_token() is not None: + logger.info("User is already logged in.") + return + + from .commands.delete_cache import _ask_for_confirmation_no_tui + + print(_HF_LOGO_ASCII) + if get_token() is not None: + logger.info( + " A token is already saved on your machine. Run `hf auth whoami`" + " to get more information or `hf auth logout` if you want" + " to log out." + ) + logger.info(" Setting a new token will erase the existing one.") + + logger.info( + " To log in, `huggingface_hub` requires a token generated from https://huggingface.co/settings/tokens ." + ) + if os.name == "nt": + logger.info("Token can be pasted using 'Right-Click'.") + token = getpass("Enter your token (input will not be visible): ") + add_to_git_credential = _ask_for_confirmation_no_tui("Add token as git credential?") + + _login(token=token, add_to_git_credential=add_to_git_credential) + + +### +# Notebook-based login (widget) +### + +NOTEBOOK_LOGIN_PASSWORD_HTML = """

+<center> <img
+src=https://huggingface.co/front/assets/huggingface_logo-noborder.svg
+alt='Hugging Face'> <br> Immediately click login after typing your password or
+it might be stored in plain text in this notebook file. </center>
""" + + +NOTEBOOK_LOGIN_TOKEN_HTML_START = """

+<center> <img
+src=https://huggingface.co/front/assets/huggingface_logo-noborder.svg
+alt='Hugging Face'> <br> Copy a token from <a
+href="https://huggingface.co/settings/tokens" target="_blank">your Hugging Face
+tokens page</a> and paste it below. <br> Immediately click login after copying
+your token or it might be stored in plain text in this notebook file. </center>
""" + + +NOTEBOOK_LOGIN_TOKEN_HTML_END = """ +Pro Tip: If you don't already have one, you can create a dedicated +'notebooks' token with 'write' access, that you can then easily reuse for all +notebooks. """ + + +@_deprecate_arguments( + version="1.0", + deprecated_args="write_permission", + custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.", +) +@_deprecate_positional_args(version="1.0") +def notebook_login(*, new_session: bool = True, write_permission: bool = False) -> None: + """ + Displays a widget to log in to the HF website and store the token. + + This is equivalent to [`login`] without passing a token when run in a notebook. + [`notebook_login`] is useful if you want to force the use of the notebook widget + instead of a prompt in the terminal. + + For more details, see [`login`]. + + Args: + new_session (`bool`, defaults to `True`): + If `True`, will request a token even if one is already saved on the machine. + write_permission (`bool`): + Ignored and deprecated argument. + """ + try: + import ipywidgets.widgets as widgets # type: ignore + from IPython.display import display # type: ignore + except ImportError: + raise ImportError( + "The `notebook_login` function can only be used in a notebook (Jupyter or" + " Colab) and you need the `ipywidgets` module: `pip install ipywidgets`." + ) + if not new_session and get_token() is not None: + logger.info("User is already logged in.") + return + + box_layout = widgets.Layout(display="flex", flex_flow="column", align_items="center", width="50%") + + token_widget = widgets.Password(description="Token:") + git_checkbox_widget = widgets.Checkbox(value=True, description="Add token as git credential?") + token_finish_button = widgets.Button(description="Login") + + login_token_widget = widgets.VBox( + [ + widgets.HTML(NOTEBOOK_LOGIN_TOKEN_HTML_START), + token_widget, + git_checkbox_widget, + token_finish_button, + widgets.HTML(NOTEBOOK_LOGIN_TOKEN_HTML_END), + ], + layout=box_layout, + ) + display(login_token_widget) + + # On click events + def login_token_event(t): + """Event handler for the login button.""" + token = token_widget.value + add_to_git_credential = git_checkbox_widget.value + # Erase token and clear value to make sure it's not saved in the notebook. 
+ token_widget.value = "" + # Hide inputs + login_token_widget.children = [widgets.Label("Connecting...")] + try: + with capture_output() as captured: + _login(token, add_to_git_credential=add_to_git_credential) + message = captured.getvalue() + except Exception as error: + message = str(error) + # Print result (success message or error) + login_token_widget.children = [widgets.Label(line) for line in message.split("\n") if line.strip()] + + token_finish_button.on_click(login_token_event) + + +### +# Login private helpers +### + + +def _login( + token: str, + add_to_git_credential: bool, +) -> None: + from .hf_api import whoami # avoid circular import + + if token.startswith("api_org"): + raise ValueError("You must use your personal account token, not an organization token.") + + token_info = whoami(token) + permission = token_info["auth"]["accessToken"]["role"] + logger.info(f"Token is valid (permission: {permission}).") + + token_name = token_info["auth"]["accessToken"]["displayName"] + # Store token locally + _save_token(token=token, token_name=token_name) + # Set active token + _set_active_token(token_name=token_name, add_to_git_credential=add_to_git_credential) + logger.info("Login successful.") + if _get_token_from_environment(): + logger.warning( + "Note: Environment variable`HF_TOKEN` is set and is the current active token independently from the token you've just configured." + ) + else: + logger.info(f"The current active token is: `{token_name}`") + + +def _logout_from_token(token_name: str) -> None: + """Logout from a specific access token. + + Args: + token_name (`str`): + The name of the access token to logout from. + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError): + If the access token name is not found. + """ + stored_tokens = get_stored_tokens() + # If there is no access tokens saved or the access token name is not found, do nothing + if not stored_tokens or token_name not in stored_tokens: + return + + token = stored_tokens.pop(token_name) + _save_stored_tokens(stored_tokens) + + if token == _get_token_from_file(): + logger.warning(f"Active token '{token_name}' has been deleted.") + Path(constants.HF_TOKEN_PATH).unlink(missing_ok=True) + + +def _set_active_token( + token_name: str, + add_to_git_credential: bool, +) -> None: + """Set the active access token. + + Args: + token_name (`str`): + The name of the token to set as active. + """ + token = _get_token_by_name(token_name) + if not token: + raise ValueError(f"Token {token_name} not found in {constants.HF_STORED_TOKENS_PATH}") + if add_to_git_credential: + if _is_git_credential_helper_configured(): + set_git_credential(token) + logger.info( + "Your token has been saved in your configured git credential helpers" + + f" ({','.join(list_credential_helpers())})." + ) + else: + logger.warning("Token has not been saved to git credential helper.") + # Write token to HF_TOKEN_PATH + path = Path(constants.HF_TOKEN_PATH) + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(token) + logger.info(f"Your token has been saved to {constants.HF_TOKEN_PATH}") + + +def _is_git_credential_helper_configured() -> bool: + """Check if a git credential helper is configured. + + Warns user if not the case (except for Google Colab where "store" is set by default + by `huggingface_hub`). 
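+
+    Example (illustrative; the output depends on the local git configuration):
+    ```py
+    >>> from huggingface_hub.utils import list_credential_helpers
+    >>> list_credential_helpers()
+    ['store']
+    ```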
+ """ + helpers = list_credential_helpers() + if len(helpers) > 0: + return True # Do not warn: at least 1 helper is set + + # Only in Google Colab to avoid the warning message + # See https://github.com/huggingface/huggingface_hub/issues/1043#issuecomment-1247010710 + if is_google_colab(): + _set_store_as_git_credential_helper_globally() + return True # Do not warn: "store" is used by default in Google Colab + + # Otherwise, warn user + print( + ANSI.red( + "Cannot authenticate through git-credential as no helper is defined on your" + " machine.\nYou might have to re-authenticate when pushing to the Hugging" + " Face Hub.\nRun the following command in your terminal in case you want to" + " set the 'store' credential helper as default.\n\ngit config --global" + " credential.helper store\n\nRead" + " https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage for more" + " details." + ) + ) + return False + + +def _set_store_as_git_credential_helper_globally() -> None: + """Set globally the credential.helper to `store`. + + To be used only in Google Colab as we assume the user doesn't care about the git + credential config. It is the only particular case where we don't want to display the + warning message in [`notebook_login()`]. + + Related: + - https://github.com/huggingface/huggingface_hub/issues/1043 + - https://github.com/huggingface/huggingface_hub/issues/1051 + - https://git-scm.com/docs/git-credential-store + """ + try: + run_subprocess("git config --global credential.helper store") + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_oauth.py b/venv/lib/python3.13/site-packages/huggingface_hub/_oauth.py new file mode 100644 index 0000000000000000000000000000000000000000..9f8eb607962bc18fec348fed18ce269524983e23 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_oauth.py @@ -0,0 +1,460 @@ +import datetime +import hashlib +import logging +import os +import time +import urllib.parse +import warnings +from dataclasses import dataclass +from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Tuple, Union + +from . import constants +from .hf_api import whoami +from .utils import experimental, get_token + + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + import fastapi + + +@dataclass +class OAuthOrgInfo: + """ + Information about an organization linked to a user logged in with OAuth. + + Attributes: + sub (`str`): + Unique identifier for the org. OpenID Connect field. + name (`str`): + The org's full name. OpenID Connect field. + preferred_username (`str`): + The org's username. OpenID Connect field. + picture (`str`): + The org's profile picture URL. OpenID Connect field. + is_enterprise (`bool`): + Whether the org is an enterprise org. Hugging Face field. + can_pay (`Optional[bool]`, *optional*): + Whether the org has a payment method set up. Hugging Face field. + role_in_org (`Optional[str]`, *optional*): + The user's role in the org. Hugging Face field. + security_restrictions (`Optional[List[Literal["ip", "token-policy", "mfa", "sso"]]]`, *optional*): + Array of security restrictions that the user hasn't completed for this org. Possible values: "ip", "token-policy", "mfa", "sso". Hugging Face field. 
+ """ + + sub: str + name: str + preferred_username: str + picture: str + is_enterprise: bool + can_pay: Optional[bool] = None + role_in_org: Optional[str] = None + security_restrictions: Optional[List[Literal["ip", "token-policy", "mfa", "sso"]]] = None + + +@dataclass +class OAuthUserInfo: + """ + Information about a user logged in with OAuth. + + Attributes: + sub (`str`): + Unique identifier for the user, even in case of rename. OpenID Connect field. + name (`str`): + The user's full name. OpenID Connect field. + preferred_username (`str`): + The user's username. OpenID Connect field. + email_verified (`Optional[bool]`, *optional*): + Indicates if the user's email is verified. OpenID Connect field. + email (`Optional[str]`, *optional*): + The user's email address. OpenID Connect field. + picture (`str`): + The user's profile picture URL. OpenID Connect field. + profile (`str`): + The user's profile URL. OpenID Connect field. + website (`Optional[str]`, *optional*): + The user's website URL. OpenID Connect field. + is_pro (`bool`): + Whether the user is a pro user. Hugging Face field. + can_pay (`Optional[bool]`, *optional*): + Whether the user has a payment method set up. Hugging Face field. + orgs (`Optional[List[OrgInfo]]`, *optional*): + List of organizations the user is part of. Hugging Face field. + """ + + sub: str + name: str + preferred_username: str + email_verified: Optional[bool] + email: Optional[str] + picture: str + profile: str + website: Optional[str] + is_pro: bool + can_pay: Optional[bool] + orgs: Optional[List[OAuthOrgInfo]] + + +@dataclass +class OAuthInfo: + """ + Information about the OAuth login. + + Attributes: + access_token (`str`): + The access token. + access_token_expires_at (`datetime.datetime`): + The expiration date of the access token. + user_info ([`OAuthUserInfo`]): + The user information. + state (`str`, *optional*): + State passed to the OAuth provider in the original request to the OAuth provider. + scope (`str`): + Granted scope. + """ + + access_token: str + access_token_expires_at: datetime.datetime + user_info: OAuthUserInfo + state: Optional[str] + scope: str + + +@experimental +def attach_huggingface_oauth(app: "fastapi.FastAPI", route_prefix: str = "/"): + """ + Add OAuth endpoints to a FastAPI app to enable OAuth login with Hugging Face. + + How to use: + - Call this method on your FastAPI app to add the OAuth endpoints. + - Inside your route handlers, call `parse_huggingface_oauth(request)` to retrieve the OAuth info. + - If user is logged in, an [`OAuthInfo`] object is returned with the user's info. If not, `None` is returned. + - In your app, make sure to add links to `/oauth/huggingface/login` and `/oauth/huggingface/logout` for the user to log in and out. + + Example: + ```py + from huggingface_hub import attach_huggingface_oauth, parse_huggingface_oauth + + # Create a FastAPI app + app = FastAPI() + + # Add OAuth endpoints to the FastAPI app + attach_huggingface_oauth(app) + + # Add a route that greets the user if they are logged in + @app.get("/") + def greet_json(request: Request): + # Retrieve the OAuth info from the request + oauth_info = parse_huggingface_oauth(request) # e.g. 
+        if oauth_info is None:
+            return {"msg": "Not logged in!"}
+        return {"msg": f"Hello, {oauth_info.user_info.preferred_username}!"}
+    ```
+    """
+    # TODO: handle generic case (handling OAuth in a non-Space environment with custom dev values) (low priority)
+
+    # Add SessionMiddleware to the FastAPI app to store the OAuth info in the session.
+    # Session Middleware requires a secret key to sign the cookies. Let's use a hash
+    # of the OAuth secret key to make it unique to the Space + updated in case OAuth
+    # config gets updated. When run locally, we use an empty string as a secret key.
+    try:
+        from starlette.middleware.sessions import SessionMiddleware
+    except ImportError as e:
+        raise ImportError(
+            "Cannot initialize OAuth due to a missing library. Please run `pip install huggingface_hub[oauth]` or add "
+            "`huggingface_hub[oauth]` to your requirements.txt file in order to install the required dependencies."
+        ) from e
+    session_secret = (constants.OAUTH_CLIENT_SECRET or "") + "-v1"
+    app.add_middleware(
+        SessionMiddleware,  # type: ignore[arg-type]
+        secret_key=hashlib.sha256(session_secret.encode()).hexdigest(),
+        same_site="none",
+        https_only=True,
+    )  # type: ignore
+
+    # Add OAuth endpoints to the FastAPI app:
+    # - {route_prefix}/oauth/huggingface/login
+    # - {route_prefix}/oauth/huggingface/callback
+    # - {route_prefix}/oauth/huggingface/logout
+    # If the app is running in a Space, OAuth is enabled normally.
+    # Otherwise, we mock the endpoints to make the user log in with a fake user profile - without any calls to hf.co.
+    route_prefix = route_prefix.strip("/")
+    if os.getenv("SPACE_ID") is not None:
+        logger.info("OAuth is enabled in the Space. Adding OAuth routes.")
+        _add_oauth_routes(app, route_prefix=route_prefix)
+    else:
+        logger.info("App is not running in a Space. Adding mocked OAuth routes.")
+        _add_mocked_oauth_routes(app, route_prefix=route_prefix)
+
+
+def parse_huggingface_oauth(request: "fastapi.Request") -> Optional[OAuthInfo]:
+    """
+    Returns the information from a logged in user as a [`OAuthInfo`] object.
+
+    For flexibility and future-proofing, this method is very lax in its parsing and does not raise errors.
+    Missing fields are set to `None` without a warning.
+
+    Return `None`, if the user is not logged in (no info in session cookie).
+
+    See [`attach_huggingface_oauth`] for an example on how to use this method.
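+
+    Example (a minimal sketch; the route and response shape are illustrative):
+    ```py
+    @app.get("/profile")
+    def profile(request: fastapi.Request):
+        oauth_info = parse_huggingface_oauth(request)
+        if oauth_info is None:
+            return {"msg": "Not logged in!"}
+        return {"username": oauth_info.user_info.preferred_username}
+    ```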
+ """ + if "oauth_info" not in request.session: + logger.debug("No OAuth info in session.") + return None + + logger.debug("Parsing OAuth info from session.") + oauth_data = request.session["oauth_info"] + user_data = oauth_data.get("userinfo", {}) + orgs_data = user_data.get("orgs", []) + + orgs = ( + [ + OAuthOrgInfo( + sub=org.get("sub"), + name=org.get("name"), + preferred_username=org.get("preferred_username"), + picture=org.get("picture"), + is_enterprise=org.get("isEnterprise"), + can_pay=org.get("canPay"), + role_in_org=org.get("roleInOrg"), + security_restrictions=org.get("securityRestrictions"), + ) + for org in orgs_data + ] + if orgs_data + else None + ) + + user_info = OAuthUserInfo( + sub=user_data.get("sub"), + name=user_data.get("name"), + preferred_username=user_data.get("preferred_username"), + email_verified=user_data.get("email_verified"), + email=user_data.get("email"), + picture=user_data.get("picture"), + profile=user_data.get("profile"), + website=user_data.get("website"), + is_pro=user_data.get("isPro"), + can_pay=user_data.get("canPay"), + orgs=orgs, + ) + + return OAuthInfo( + access_token=oauth_data.get("access_token"), + access_token_expires_at=datetime.datetime.fromtimestamp(oauth_data.get("expires_at")), + user_info=user_info, + state=oauth_data.get("state"), + scope=oauth_data.get("scope"), + ) + + +def _add_oauth_routes(app: "fastapi.FastAPI", route_prefix: str) -> None: + """Add OAuth routes to the FastAPI app (login, callback handler and logout).""" + try: + import fastapi + from authlib.integrations.base_client.errors import MismatchingStateError + from authlib.integrations.starlette_client import OAuth + from fastapi.responses import RedirectResponse + except ImportError as e: + raise ImportError( + "Cannot initialize OAuth to due a missing library. Please run `pip install huggingface_hub[oauth]` or add " + "`huggingface_hub[oauth]` to your requirements.txt file." + ) from e + + # Check environment variables + msg = ( + "OAuth is required but '{}' environment variable is not set. Make sure you've enabled OAuth in your Space by" + " setting `hf_oauth: true` in the Space metadata." 
+ ) + if constants.OAUTH_CLIENT_ID is None: + raise ValueError(msg.format("OAUTH_CLIENT_ID")) + if constants.OAUTH_CLIENT_SECRET is None: + raise ValueError(msg.format("OAUTH_CLIENT_SECRET")) + if constants.OAUTH_SCOPES is None: + raise ValueError(msg.format("OAUTH_SCOPES")) + if constants.OPENID_PROVIDER_URL is None: + raise ValueError(msg.format("OPENID_PROVIDER_URL")) + + # Register OAuth server + oauth = OAuth() + oauth.register( + name="huggingface", + client_id=constants.OAUTH_CLIENT_ID, + client_secret=constants.OAUTH_CLIENT_SECRET, + client_kwargs={"scope": constants.OAUTH_SCOPES}, + server_metadata_url=constants.OPENID_PROVIDER_URL + "/.well-known/openid-configuration", + ) + + login_uri, callback_uri, logout_uri = _get_oauth_uris(route_prefix) + + # Register OAuth endpoints + @app.get(login_uri) + async def oauth_login(request: fastapi.Request) -> RedirectResponse: + """Endpoint that redirects to HF OAuth page.""" + redirect_uri = _generate_redirect_uri(request) + return await oauth.huggingface.authorize_redirect(request, redirect_uri) # type: ignore + + @app.get(callback_uri) + async def oauth_redirect_callback(request: fastapi.Request) -> RedirectResponse: + """Endpoint that handles the OAuth callback.""" + try: + oauth_info = await oauth.huggingface.authorize_access_token(request) # type: ignore + except MismatchingStateError: + # Parse query params + nb_redirects = int(request.query_params.get("_nb_redirects", 0)) + target_url = request.query_params.get("_target_url") + + # Build redirect URI with the same query params as before and bump nb_redirects count + query_params: Dict[str, Union[int, str]] = {"_nb_redirects": nb_redirects + 1} + if target_url: + query_params["_target_url"] = target_url + + redirect_uri = f"{login_uri}?{urllib.parse.urlencode(query_params)}" + + # If the user is redirected more than 3 times, it is very likely that the cookie is not working properly. + # (e.g. browser is blocking third-party cookies in iframe). In this case, redirect the user in the + # non-iframe view. + if nb_redirects > constants.OAUTH_MAX_REDIRECTS: + host = os.environ.get("SPACE_HOST") + if host is None: # cannot happen in a Space + raise RuntimeError( + "App is not running in a Space (SPACE_HOST environment variable is not set). Cannot redirect to non-iframe view." + ) from None + host_url = "https://" + host.rstrip("/") + return RedirectResponse(host_url + redirect_uri) + + # Redirect the user to the login page again + return RedirectResponse(redirect_uri) + + # OAuth login worked => store the user info in the session and redirect + logger.debug("Successfully logged in with OAuth. Storing user info in session.") + request.session["oauth_info"] = oauth_info + return RedirectResponse(_get_redirect_target(request)) + + @app.get(logout_uri) + async def oauth_logout(request: fastapi.Request) -> RedirectResponse: + """Endpoint that logs out the user (e.g. delete info from cookie session).""" + logger.debug("Logged out with OAuth. Removing user info from session.") + request.session.pop("oauth_info", None) + return RedirectResponse(_get_redirect_target(request)) + + +def _add_mocked_oauth_routes(app: "fastapi.FastAPI", route_prefix: str = "/") -> None: + """Add fake oauth routes if app is run locally and OAuth is enabled. + + Using OAuth will have the same behavior as in a Space but instead of authenticating with HF, a mocked user profile + is added to the session. 
+ """ + try: + import fastapi + from fastapi.responses import RedirectResponse + from starlette.datastructures import URL + except ImportError as e: + raise ImportError( + "Cannot initialize OAuth to due a missing library. Please run `pip install huggingface_hub[oauth]` or add " + "`huggingface_hub[oauth]` to your requirements.txt file." + ) from e + + warnings.warn( + "OAuth is not supported outside of a Space environment. To help you debug your app locally, the oauth endpoints" + " are mocked to return your profile and token. To make it work, your machine must be logged in to Huggingface." + ) + mocked_oauth_info = _get_mocked_oauth_info() + + login_uri, callback_uri, logout_uri = _get_oauth_uris(route_prefix) + + # Define OAuth routes + @app.get(login_uri) + async def oauth_login(request: fastapi.Request) -> RedirectResponse: + """Fake endpoint that redirects to HF OAuth page.""" + # Define target (where to redirect after login) + redirect_uri = _generate_redirect_uri(request) + return RedirectResponse(callback_uri + "?" + urllib.parse.urlencode({"_target_url": redirect_uri})) + + @app.get(callback_uri) + async def oauth_redirect_callback(request: fastapi.Request) -> RedirectResponse: + """Endpoint that handles the OAuth callback.""" + request.session["oauth_info"] = mocked_oauth_info + return RedirectResponse(_get_redirect_target(request)) + + @app.get(logout_uri) + async def oauth_logout(request: fastapi.Request) -> RedirectResponse: + """Endpoint that logs out the user (e.g. delete cookie session).""" + request.session.pop("oauth_info", None) + logout_url = URL("/").include_query_params(**request.query_params) + return RedirectResponse(url=logout_url, status_code=302) # see https://github.com/gradio-app/gradio/pull/9659 + + +def _generate_redirect_uri(request: "fastapi.Request") -> str: + if "_target_url" in request.query_params: + # if `_target_url` already in query params => respect it + target = request.query_params["_target_url"] + else: + # otherwise => keep query params + target = "/?" + urllib.parse.urlencode(request.query_params) + + redirect_uri = request.url_for("oauth_redirect_callback").include_query_params(_target_url=target) + redirect_uri_as_str = str(redirect_uri) + if redirect_uri.netloc.endswith(".hf.space"): + # In Space, FastAPI redirect as http but we want https + redirect_uri_as_str = redirect_uri_as_str.replace("http://", "https://") + return redirect_uri_as_str + + +def _get_redirect_target(request: "fastapi.Request", default_target: str = "/") -> str: + return request.query_params.get("_target_url", default_target) + + +def _get_mocked_oauth_info() -> Dict: + token = get_token() + if token is None: + raise ValueError( + "Your machine must be logged in to HF to debug an OAuth app locally. Please" + " run `hf auth login` or set `HF_TOKEN` as environment variable " + "with one of your access token. You can generate a new token in your " + "settings page (https://huggingface.co/settings/tokens)." + ) + + user = whoami() + if user["type"] != "user": + raise ValueError( + "Your machine is not logged in with a personal account. Please use a " + "personal access token. You can generate a new token in your settings page" + " (https://huggingface.co/settings/tokens)." 
+ ) + + return { + "access_token": token, + "token_type": "bearer", + "expires_in": 8 * 60 * 60, # 8 hours + "id_token": "FOOBAR", + "scope": "openid profile", + "refresh_token": "hf_oauth__refresh_token", + "expires_at": int(time.time()) + 8 * 60 * 60, # 8 hours + "userinfo": { + "sub": "0123456789", + "name": user["fullname"], + "preferred_username": user["name"], + "profile": f"https://huggingface.co/{user['name']}", + "picture": user["avatarUrl"], + "website": "", + "aud": "00000000-0000-0000-0000-000000000000", + "auth_time": 1691672844, + "nonce": "aaaaaaaaaaaaaaaaaaa", + "iat": 1691672844, + "exp": 1691676444, + "iss": "https://huggingface.co", + }, + } + + +def _get_oauth_uris(route_prefix: str = "/") -> Tuple[str, str, str]: + route_prefix = route_prefix.strip("/") + if route_prefix: + route_prefix = f"/{route_prefix}" + return ( + f"{route_prefix}/oauth/huggingface/login", + f"{route_prefix}/oauth/huggingface/callback", + f"{route_prefix}/oauth/huggingface/logout", + ) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_snapshot_download.py b/venv/lib/python3.13/site-packages/huggingface_hub/_snapshot_download.py new file mode 100644 index 0000000000000000000000000000000000000000..0db8a29f7e65a4841590d033f6b7b51d46647bf0 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_snapshot_download.py @@ -0,0 +1,343 @@ +import os +from pathlib import Path +from typing import Dict, Iterable, List, Literal, Optional, Type, Union + +import requests +from tqdm.auto import tqdm as base_tqdm +from tqdm.contrib.concurrent import thread_map + +from . import constants +from .errors import ( + GatedRepoError, + HfHubHTTPError, + LocalEntryNotFoundError, + RepositoryNotFoundError, + RevisionNotFoundError, +) +from .file_download import REGEX_COMMIT_HASH, hf_hub_download, repo_folder_name +from .hf_api import DatasetInfo, HfApi, ModelInfo, RepoFile, SpaceInfo +from .utils import OfflineModeIsEnabled, filter_repo_objects, logging, validate_hf_hub_args +from .utils import tqdm as hf_tqdm + + +logger = logging.get_logger(__name__) + +VERY_LARGE_REPO_THRESHOLD = 50000 # After this limit, we don't consider `repo_info.siblings` to be reliable enough + + +@validate_hf_hub_args +def snapshot_download( + repo_id: str, + *, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + cache_dir: Union[str, Path, None] = None, + local_dir: Union[str, Path, None] = None, + library_name: Optional[str] = None, + library_version: Optional[str] = None, + user_agent: Optional[Union[Dict, str]] = None, + proxies: Optional[Dict] = None, + etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT, + force_download: bool = False, + token: Optional[Union[bool, str]] = None, + local_files_only: bool = False, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + max_workers: int = 8, + tqdm_class: Optional[Type[base_tqdm]] = None, + headers: Optional[Dict[str, str]] = None, + endpoint: Optional[str] = None, + # Deprecated args + local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto", + resume_download: Optional[bool] = None, +) -> str: + """Download repo files. + + Download a whole snapshot of a repo's files at the specified revision. This is useful when you want all files from + a repo, because you don't know which ones you will need a priori. All files are nested inside a folder in order + to keep their actual filename relative to that folder. 
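+
+    A minimal usage sketch (the repo id and the returned path are placeholders, not defaults):
+
+    ```py
+    >>> from huggingface_hub import snapshot_download
+    >>> snapshot_download(repo_id="user/my-model")  # download everything at the default revision
+    '/path/to/cache/models--user--my-model/snapshots/<commit-hash>'
+    ```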
You can also filter which files to download using + `allow_patterns` and `ignore_patterns`. + + If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this + option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir` + to store some metadata related to the downloaded files. While this mechanism is not as robust as the main + cache-system, it's optimized for regularly pulling the latest version of a repository. + + An alternative would be to clone the repo but this requires git and git-lfs to be installed and properly + configured. It is also not possible to filter which files to download when cloning a repository using git. + + Args: + repo_id (`str`): + A user or an organization name and a repo name separated by a `/`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if downloading from a dataset or space, + `None` or `"model"` if downloading from a model. Default is `None`. + revision (`str`, *optional*): + An optional Git revision id which can be a branch name, a tag, or a + commit hash. + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + local_dir (`str` or `Path`, *optional*): + If provided, the downloaded files will be placed under this directory. + library_name (`str`, *optional*): + The name of the library to which the object corresponds. + library_version (`str`, *optional*): + The version of the library. + user_agent (`str`, `dict`, *optional*): + The user-agent info in the form of a dictionary or a string. + proxies (`dict`, *optional*): + Dictionary mapping protocol to the URL of the proxy passed to + `requests.request`. + etag_timeout (`float`, *optional*, defaults to `10`): + When fetching ETag, how many seconds to wait for the server to send + data before giving up which is passed to `requests.request`. + force_download (`bool`, *optional*, defaults to `False`): + Whether the file should be downloaded even if it already exists in the local cache. + token (`str`, `bool`, *optional*): + A token to be used for the download. + - If `True`, the token is read from the HuggingFace config + folder. + - If a string, it's used as the authentication token. + headers (`dict`, *optional*): + Additional headers to include in the request. Those headers take precedence over the others. + local_files_only (`bool`, *optional*, defaults to `False`): + If `True`, avoid downloading the file and return the path to the + local cached file if it exists. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are downloaded. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not downloaded. + max_workers (`int`, *optional*): + Number of concurrent threads to download files (1 thread = 1 file download). + Defaults to 8. + tqdm_class (`tqdm`, *optional*): + If provided, overwrites the default behavior for the progress bar. Passed + argument must inherit from `tqdm.auto.tqdm` or at least mimic its behavior. + Note that the `tqdm_class` is not passed to each individual download. + Defaults to the custom HF progress bar that can be disabled by setting + `HF_HUB_DISABLE_PROGRESS_BARS` environment variable. + + Returns: + `str`: folder path of the repo snapshot. + + Raises: + [`~utils.RepositoryNotFoundError`] + If the repository to download from cannot be found. 
This may be because it doesn't exist,
+            or because it is set to `private` and you do not have access.
+        [`~utils.RevisionNotFoundError`]
+            If the revision to download from cannot be found.
+        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+            If `token=True` and the token cannot be found.
+        [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
+            If the ETag cannot be determined.
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If some parameter value is invalid.
+    """
+    if cache_dir is None:
+        cache_dir = constants.HF_HUB_CACHE
+    if revision is None:
+        revision = constants.DEFAULT_REVISION
+    if isinstance(cache_dir, Path):
+        cache_dir = str(cache_dir)
+
+    if repo_type is None:
+        repo_type = "model"
+    if repo_type not in constants.REPO_TYPES:
+        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are: {str(constants.REPO_TYPES)}")
+
+    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))
+
+    api = HfApi(
+        library_name=library_name,
+        library_version=library_version,
+        user_agent=user_agent,
+        endpoint=endpoint,
+        headers=headers,
+        token=token,
+    )
+
+    repo_info: Union[ModelInfo, DatasetInfo, SpaceInfo, None] = None
+    api_call_error: Optional[Exception] = None
+    if not local_files_only:
+        # try/except logic to handle different errors => taken from `hf_hub_download`
+        try:
+            # if we have an internet connection we want to list the files to download
+            repo_info = api.repo_info(repo_id=repo_id, repo_type=repo_type, revision=revision)
+        except (requests.exceptions.SSLError, requests.exceptions.ProxyError):
+            # Actually raise for those subclasses of ConnectionError
+            raise
+        except (
+            requests.exceptions.ConnectionError,
+            requests.exceptions.Timeout,
+            OfflineModeIsEnabled,
+        ) as error:
+            # Internet connection is down
+            # => will try to use local files only
+            api_call_error = error
+        except RevisionNotFoundError:
+            # The repo was found but the revision doesn't exist on the Hub (never existed or got deleted)
+            raise
+        except requests.HTTPError as error:
+            # Multiple reasons for an http error:
+            # - Repository is private and invalid/missing token sent
+            # - Repository is gated and invalid/missing token sent
+            # - Hub is down (error 500 or 504)
+            # => let's switch to 'local_files_only=True' to check if the files are already cached.
+            #    (if it's not the case, the error will be re-raised)
+            api_call_error = error
+
+    # At this stage, if `repo_info` is None it means either:
+    # - internet connection is down
+    # - internet connection is deactivated (local_files_only=True or HF_HUB_OFFLINE=True)
+    # - repo is private/gated and invalid/missing token sent
+    # - Hub is down
+    # => let's look if we can find the appropriate folder in the cache:
+    #    - if the specified revision is a commit hash, look inside "snapshots".
+    #    - if the specified revision is a branch or tag, look inside "refs".
+    # => if local_dir is not None, we will return the path to the local folder if it exists.
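+    # Illustrative sketch (comment only): a caller relying on this offline fallback, assuming the
+    # snapshot was fully downloaded at least once before:
+    #
+    #     path = snapshot_download("user/my-model", local_files_only=True)  # resolved from cache, no network call
+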
+ if repo_info is None: + # Try to get which commit hash corresponds to the specified revision + commit_hash = None + if REGEX_COMMIT_HASH.match(revision): + commit_hash = revision + else: + ref_path = os.path.join(storage_folder, "refs", revision) + if os.path.exists(ref_path): + # retrieve commit_hash from refs file + with open(ref_path) as f: + commit_hash = f.read() + + # Try to locate snapshot folder for this commit hash + if commit_hash is not None and local_dir is None: + snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash) + if os.path.exists(snapshot_folder): + # Snapshot folder exists => let's return it + # (but we can't check if all the files are actually there) + return snapshot_folder + + # If local_dir is not None, return it if it exists and is not empty + if local_dir is not None: + local_dir = Path(local_dir) + if local_dir.is_dir() and any(local_dir.iterdir()): + logger.warning( + f"Returning existing local_dir `{local_dir}` as remote repo cannot be accessed in `snapshot_download` ({api_call_error})." + ) + return str(local_dir.resolve()) + # If we couldn't find the appropriate folder on disk, raise an error. + if local_files_only: + raise LocalEntryNotFoundError( + "Cannot find an appropriate cached snapshot folder for the specified revision on the local disk and " + "outgoing traffic has been disabled. To enable repo look-ups and downloads online, pass " + "'local_files_only=False' as input." + ) + elif isinstance(api_call_error, OfflineModeIsEnabled): + raise LocalEntryNotFoundError( + "Cannot find an appropriate cached snapshot folder for the specified revision on the local disk and " + "outgoing traffic has been disabled. To enable repo look-ups and downloads online, set " + "'HF_HUB_OFFLINE=0' as environment variable." + ) from api_call_error + elif isinstance(api_call_error, (RepositoryNotFoundError, GatedRepoError)) or ( + isinstance(api_call_error, HfHubHTTPError) and api_call_error.response.status_code == 401 + ): + # Repo not found, gated, or specific authentication error => let's raise the actual error + raise api_call_error + else: + # Otherwise: most likely a connection issue or Hub downtime => let's warn the user + raise LocalEntryNotFoundError( + "An error happened while trying to locate the files on the Hub and we cannot find the appropriate" + " snapshot folder for the specified revision on the local disk. Please check your internet connection" + " and try again." + ) from api_call_error + + # At this stage, internet connection is up and running + # => let's download the files! + assert repo_info.sha is not None, "Repo info returned from server must have a revision sha." + + # Corner case: on very large repos, the siblings list in `repo_info` might not contain all files. + # In that case, we need to use the `list_repo_tree` method to prevent caching issues. + repo_files: Iterable[str] = [f.rfilename for f in repo_info.siblings] if repo_info.siblings is not None else [] + unreliable_nb_files = ( + repo_info.siblings is None + or len(repo_info.siblings) == 0 + or len(repo_info.siblings) > VERY_LARGE_REPO_THRESHOLD + ) + if unreliable_nb_files: + logger.info( + "Number of files in the repo is unreliable. Using `list_repo_tree` to ensure all files are listed." 
+ ) + repo_files = ( + f.rfilename + for f in api.list_repo_tree(repo_id=repo_id, recursive=True, revision=revision, repo_type=repo_type) + if isinstance(f, RepoFile) + ) + + filtered_repo_files: Iterable[str] = filter_repo_objects( + items=repo_files, + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + ) + + if not unreliable_nb_files: + filtered_repo_files = list(filtered_repo_files) + tqdm_desc = f"Fetching {len(filtered_repo_files)} files" + else: + tqdm_desc = "Fetching ... files" + + commit_hash = repo_info.sha + snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash) + # if passed revision is not identical to commit_hash + # then revision has to be a branch name or tag name. + # In that case store a ref. + if revision != commit_hash: + ref_path = os.path.join(storage_folder, "refs", revision) + try: + os.makedirs(os.path.dirname(ref_path), exist_ok=True) + with open(ref_path, "w") as f: + f.write(commit_hash) + except OSError as e: + logger.warning(f"Ignored error while writing commit hash to {ref_path}: {e}.") + + # we pass the commit_hash to hf_hub_download + # so no network call happens if we already + # have the file locally. + def _inner_hf_hub_download(repo_file: str): + return hf_hub_download( + repo_id, + filename=repo_file, + repo_type=repo_type, + revision=commit_hash, + endpoint=endpoint, + cache_dir=cache_dir, + local_dir=local_dir, + local_dir_use_symlinks=local_dir_use_symlinks, + library_name=library_name, + library_version=library_version, + user_agent=user_agent, + proxies=proxies, + etag_timeout=etag_timeout, + resume_download=resume_download, + force_download=force_download, + token=token, + headers=headers, + ) + + if constants.HF_HUB_ENABLE_HF_TRANSFER: + # when using hf_transfer we don't want extra parallelism + # from the one hf_transfer provides + for file in filtered_repo_files: + _inner_hf_hub_download(file) + else: + thread_map( + _inner_hf_hub_download, + filtered_repo_files, + desc=tqdm_desc, + max_workers=max_workers, + # User can use its own tqdm class or the default one from `huggingface_hub.utils` + tqdm_class=tqdm_class or hf_tqdm, + ) + + if local_dir is not None: + return str(os.path.realpath(local_dir)) + return snapshot_folder diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_space_api.py b/venv/lib/python3.13/site-packages/huggingface_hub/_space_api.py new file mode 100644 index 0000000000000000000000000000000000000000..05fccfbc1ebdfc14840a88751914b8fc0d1a498d --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_space_api.py @@ -0,0 +1,168 @@ +# coding=utf-8 +# Copyright 2019-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from dataclasses import dataclass +from datetime import datetime +from enum import Enum +from typing import Dict, Optional + +from huggingface_hub.utils import parse_datetime + + +class SpaceStage(str, Enum): + """ + Enumeration of possible stage of a Space on the Hub. 
+
+    Value can be compared to a string:
+    ```py
+    assert SpaceStage.BUILDING == "BUILDING"
+    ```
+
+    Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceInfo.ts#L61 (private url).
+    """
+
+    # Copied from moon-landing > server > repo_types > SpaceInfo.ts (private repo)
+    NO_APP_FILE = "NO_APP_FILE"
+    CONFIG_ERROR = "CONFIG_ERROR"
+    BUILDING = "BUILDING"
+    BUILD_ERROR = "BUILD_ERROR"
+    RUNNING = "RUNNING"
+    RUNNING_BUILDING = "RUNNING_BUILDING"
+    RUNTIME_ERROR = "RUNTIME_ERROR"
+    DELETING = "DELETING"
+    STOPPED = "STOPPED"
+    PAUSED = "PAUSED"
+
+
+class SpaceHardware(str, Enum):
+    """
+    Enumeration of the hardware available to run your Space on the Hub.
+
+    Value can be compared to a string:
+    ```py
+    assert SpaceHardware.CPU_BASIC == "cpu-basic"
+    ```
+
+    Taken from https://github.com/huggingface-internal/moon-landing/blob/main/server/repo_types/SpaceHardwareFlavor.ts (private url).
+    """
+
+    # CPU
+    CPU_BASIC = "cpu-basic"
+    CPU_UPGRADE = "cpu-upgrade"
+    CPU_XL = "cpu-xl"
+
+    # ZeroGPU
+    ZERO_A10G = "zero-a10g"
+
+    # GPU
+    T4_SMALL = "t4-small"
+    T4_MEDIUM = "t4-medium"
+    L4X1 = "l4x1"
+    L4X4 = "l4x4"
+    L40SX1 = "l40sx1"
+    L40SX4 = "l40sx4"
+    L40SX8 = "l40sx8"
+    A10G_SMALL = "a10g-small"
+    A10G_LARGE = "a10g-large"
+    A10G_LARGEX2 = "a10g-largex2"
+    A10G_LARGEX4 = "a10g-largex4"
+    A100_LARGE = "a100-large"
+    H100 = "h100"
+    H100X8 = "h100x8"
+
+
+class SpaceStorage(str, Enum):
+    """
+    Enumeration of the persistent storage options available for your Space on the Hub.
+
+    Value can be compared to a string:
+    ```py
+    assert SpaceStorage.SMALL == "small"
+    ```
+
+    Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceHardwareFlavor.ts#L24 (private url).
+    """
+
+    SMALL = "small"
+    MEDIUM = "medium"
+    LARGE = "large"
+
+
+@dataclass
+class SpaceRuntime:
+    """
+    Contains information about the current runtime of a Space.
+
+    Args:
+        stage (`str`):
+            Current stage of the space. Example: RUNNING.
+        hardware (`str` or `None`):
+            Current hardware of the space. Example: "cpu-basic". Can be `None` if the Space
+            is `BUILDING` for the first time.
+        requested_hardware (`str` or `None`):
+            Requested hardware. Can be different from `hardware`, especially if the request
+            has just been made. Example: "t4-medium". Can be `None` if no hardware has
+            been requested yet.
+        sleep_time (`int` or `None`):
+            Number of seconds the Space will be kept alive after the last request. By default (if value is `None`), the
+            Space will never go to sleep if it's running on upgraded hardware, while it will go to sleep after 48
+            hours on free 'cpu-basic' hardware. For more details, see https://huggingface.co/docs/hub/spaces-gpus#sleep-time.
+        storage (`str` or `None`):
+            Current persistent storage of the space. Example: "small". Can be `None` if no storage is attached.
+        raw (`dict`):
+            Raw response from the server. Contains more information about the Space
+            runtime like number of replicas, number of CPUs, memory size, ...
+    """
+
+    stage: SpaceStage
+    hardware: Optional[SpaceHardware]
+    requested_hardware: Optional[SpaceHardware]
+    sleep_time: Optional[int]
+    storage: Optional[SpaceStorage]
+    raw: Dict
+
+    def __init__(self, data: Dict) -> None:
+        self.stage = data["stage"]
+        self.hardware = data.get("hardware", {}).get("current")
+        self.requested_hardware = data.get("hardware", {}).get("requested")
+        self.sleep_time = data.get("gcTimeout")
+        self.storage = data.get("storage")
+        self.raw = data
+
+
+@dataclass
+class SpaceVariable:
+    """
+    Contains information about the current variables of a Space.
+
+    Args:
+        key (`str`):
+            Variable key. Example: `"MODEL_REPO_ID"`
+        value (`str`):
+            Variable value.
Example: `"the_model_repo_id"`.
+        description (`str` or `None`):
+            Description of the variable. Example: `"Model Repo ID of the implemented model"`.
+        updated_at (`datetime` or `None`):
+            datetime of the last update of the variable (if the variable has been updated at least once).
+    """
+
+    key: str
+    value: str
+    description: Optional[str]
+    updated_at: Optional[datetime]
+
+    def __init__(self, key: str, values: Dict) -> None:
+        self.key = key
+        self.value = values["value"]
+        self.description = values.get("description")
+        updated_at = values.get("updatedAt")
+        self.updated_at = parse_datetime(updated_at) if updated_at is not None else None
diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_tensorboard_logger.py b/venv/lib/python3.13/site-packages/huggingface_hub/_tensorboard_logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d9581d8ee127436ec1e1d585ed0426422a66131
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/huggingface_hub/_tensorboard_logger.py
@@ -0,0 +1,190 @@
+# Copyright 2023 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains a logger to push training logs to the Hub, using Tensorboard."""
+
+from pathlib import Path
+from typing import List, Optional, Union
+
+from ._commit_scheduler import CommitScheduler
+from .errors import EntryNotFoundError
+from .repocard import ModelCard
+from .utils import experimental
+
+
+# Depending on the user's setup, SummaryWriter can come either from 'tensorboardX'
+# or from 'torch.utils.tensorboard'. Both are compatible so let's try to load
+# from either of them.
+try:
+    from tensorboardX import SummaryWriter as _RuntimeSummaryWriter
+
+    is_summary_writer_available = True
+except ImportError:
+    try:
+        from torch.utils.tensorboard import SummaryWriter as _RuntimeSummaryWriter
+
+        is_summary_writer_available = True
+    except ImportError:
+        # Dummy class to avoid failing at import. Will raise on instance creation.
+        class _DummySummaryWriter:
+            pass
+
+        _RuntimeSummaryWriter = _DummySummaryWriter  # type: ignore[assignment]
+        is_summary_writer_available = False
+
+
+class HFSummaryWriter(_RuntimeSummaryWriter):
+    """
+    Wrapper around tensorboard's `SummaryWriter` to push training logs to the Hub.
+
+    Data is logged locally and then pushed to the Hub asynchronously. Pushing data to the Hub is done in a separate
+    thread to avoid blocking the training script. In particular, if the upload fails for any reason (e.g. a connection
+    issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
+    minutes (defaults to every 5 minutes).
+
+    > [!WARNING]
+    > `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.
+
+    Args:
+        repo_id (`str`):
+            The id of the repo to which the logs will be pushed.
+        logdir (`str`, *optional*):
+            The directory where the logs will be written. If not specified, a local directory will be created by the
+            underlying `SummaryWriter` object.
+        commit_every (`int` or `float`, *optional*):
+            The frequency (in minutes) at which the logs will be pushed to the Hub. Defaults to 5 minutes.
+        squash_history (`bool`, *optional*):
+            Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
+            useful to avoid degraded performance on the repo when it grows too large.
+        repo_type (`str`, *optional*):
+            The type of the repo to which the logs will be pushed. Defaults to "model".
+        repo_revision (`str`, *optional*):
+            The revision of the repo to which the logs will be pushed. Defaults to "main".
+        repo_private (`bool`, *optional*):
+            Whether to make the repo private. If `None` (default), the repo will be public unless the organization's
+            default is private. This value is ignored if the repo already exists.
+        path_in_repo (`str`, *optional*):
+            The path to the folder in the repo where the logs will be pushed. Defaults to "tensorboard/".
+        repo_allow_patterns (`List[str]` or `str`, *optional*):
+            A list of patterns to include in the upload. Defaults to `"*.tfevents.*"`. Check out the
+            [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
+        repo_ignore_patterns (`List[str]` or `str`, *optional*):
+            A list of patterns to exclude from the upload. Check out the
+            [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
+        token (`str`, *optional*):
+            Authentication token. Will default to the stored token. See https://huggingface.co/settings/token for more
+            details.
+        kwargs:
+            Additional keyword arguments passed to `SummaryWriter`.
+
+    Examples:
+    ```diff
+    # Taken from https://pytorch.org/docs/stable/tensorboard.html
+    - from torch.utils.tensorboard import SummaryWriter
+    + from huggingface_hub import HFSummaryWriter
+    import numpy as np
+
+    - writer = SummaryWriter()
+    + writer = HFSummaryWriter(repo_id="username/my-trained-model")
+
+    for n_iter in range(100):
+        writer.add_scalar('Loss/train', np.random.random(), n_iter)
+        writer.add_scalar('Loss/test', np.random.random(), n_iter)
+        writer.add_scalar('Accuracy/train', np.random.random(), n_iter)
+        writer.add_scalar('Accuracy/test', np.random.random(), n_iter)
+    ```
+
+    ```py
+    >>> from huggingface_hub import HFSummaryWriter
+
+    # Logs are automatically pushed every 15 minutes (5 by default) + when exiting the context manager
+    >>> with HFSummaryWriter(repo_id="test_hf_logger", commit_every=15) as logger:
+    ...     logger.add_scalar("a", 1)
+    ...     logger.add_scalar("b", 2)
+    ```
+    """
+
+    @experimental
+    def __new__(cls, *args, **kwargs) -> "HFSummaryWriter":
+        if not is_summary_writer_available:
+            raise ImportError(
+                "You must have `tensorboard` installed to use `HFSummaryWriter`. Please run `pip install --upgrade"
+                " tensorboardX` first."
+            )
+        return super().__new__(cls)
+
+    def __init__(
+        self,
+        repo_id: str,
+        *,
+        logdir: Optional[str] = None,
+        commit_every: Union[int, float] = 5,
+        squash_history: bool = False,
+        repo_type: Optional[str] = None,
+        repo_revision: Optional[str] = None,
+        repo_private: Optional[bool] = None,
+        path_in_repo: Optional[str] = "tensorboard",
+        repo_allow_patterns: Optional[Union[List[str], str]] = "*.tfevents.*",
+        repo_ignore_patterns: Optional[Union[List[str], str]] = None,
+        token: Optional[str] = None,
+        **kwargs,
+    ):
+        # Initialize SummaryWriter
+        super().__init__(logdir=logdir, **kwargs)
+
+        # Check that logdir has been correctly initialized and fail early otherwise. In practice, SummaryWriter takes care of it.
+ if not isinstance(self.logdir, str): + raise ValueError(f"`self.logdir` must be a string. Got '{self.logdir}' of type {type(self.logdir)}.") + + # Append logdir name to `path_in_repo` + if path_in_repo is None or path_in_repo == "": + path_in_repo = Path(self.logdir).name + else: + path_in_repo = path_in_repo.strip("/") + "/" + Path(self.logdir).name + + # Initialize scheduler + self.scheduler = CommitScheduler( + folder_path=self.logdir, + path_in_repo=path_in_repo, + repo_id=repo_id, + repo_type=repo_type, + revision=repo_revision, + private=repo_private, + token=token, + allow_patterns=repo_allow_patterns, + ignore_patterns=repo_ignore_patterns, + every=commit_every, + squash_history=squash_history, + ) + + # Exposing some high-level info at root level + self.repo_id = self.scheduler.repo_id + self.repo_type = self.scheduler.repo_type + self.repo_revision = self.scheduler.revision + + # Add `hf-summary-writer` tag to the model card metadata + try: + card = ModelCard.load(repo_id_or_path=self.repo_id, repo_type=self.repo_type) + except EntryNotFoundError: + card = ModelCard("") + tags = card.data.get("tags", []) + if "hf-summary-writer" not in tags: + tags.append("hf-summary-writer") + card.data["tags"] = tags + card.push_to_hub(repo_id=self.repo_id, repo_type=self.repo_type) + + def __exit__(self, exc_type, exc_val, exc_tb): + """Push to hub in a non-blocking way when exiting the logger's context manager.""" + super().__exit__(exc_type, exc_val, exc_tb) + future = self.scheduler.trigger() + future.result() diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_upload_large_folder.py b/venv/lib/python3.13/site-packages/huggingface_hub/_upload_large_folder.py new file mode 100644 index 0000000000000000000000000000000000000000..1ccbc07d39d3d03e9bb8c39f1bb16aa2ca4ab41f --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_upload_large_folder.py @@ -0,0 +1,755 @@ +# coding=utf-8 +# Copyright 2024-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import enum +import logging +import os +import queue +import shutil +import sys +import threading +import time +import traceback +from datetime import datetime +from pathlib import Path +from threading import Lock +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from urllib.parse import quote + +from . 
import constants +from ._commit_api import CommitOperationAdd, UploadInfo, _fetch_upload_modes +from ._local_folder import LocalUploadFileMetadata, LocalUploadFilePaths, get_local_upload_paths, read_upload_metadata +from .constants import DEFAULT_REVISION, REPO_TYPES +from .utils import DEFAULT_IGNORE_PATTERNS, filter_repo_objects, tqdm +from .utils._cache_manager import _format_size +from .utils._runtime import is_xet_available +from .utils.sha import sha_fileobj + + +if TYPE_CHECKING: + from .hf_api import HfApi + +logger = logging.getLogger(__name__) + +WAITING_TIME_IF_NO_TASKS = 10 # seconds +MAX_NB_FILES_FETCH_UPLOAD_MODE = 100 +COMMIT_SIZE_SCALE: List[int] = [20, 50, 75, 100, 125, 200, 250, 400, 600, 1000] + +UPLOAD_BATCH_SIZE_XET = 256 # Max 256 files per upload batch for XET-enabled repos +UPLOAD_BATCH_SIZE_LFS = 1 # Otherwise, batches of 1 for regular LFS upload + +# Repository limits (from https://huggingface.co/docs/hub/repositories-recommendations) +MAX_FILES_PER_REPO = 100_000 # Recommended maximum number of files per repository +MAX_FILES_PER_FOLDER = 10_000 # Recommended maximum number of files per folder +MAX_FILE_SIZE_GB = 50 # Hard limit for individual file size +RECOMMENDED_FILE_SIZE_GB = 20 # Recommended maximum for individual file size + + +def _validate_upload_limits(paths_list: List[LocalUploadFilePaths]) -> None: + """ + Validate upload against repository limits and warn about potential issues. + + Args: + paths_list: List of file paths to be uploaded + + Warns about: + - Too many files in the repository (>100k) + - Too many entries (files or subdirectories) in a single folder (>10k) + - Files exceeding size limits (>20GB recommended, >50GB hard limit) + """ + logger.info("Running validation checks on files to upload...") + + # Check 1: Total file count + if len(paths_list) > MAX_FILES_PER_REPO: + logger.warning( + f"You are about to upload {len(paths_list):,} files. " + f"This exceeds the recommended limit of {MAX_FILES_PER_REPO:,} files per repository.\n" + f"Consider:\n" + f" - Splitting your data into multiple repositories\n" + f" - Using fewer, larger files (e.g., parquet files)\n" + f" - See: https://huggingface.co/docs/hub/repositories-recommendations" + ) + + # Check 2: Files and subdirectories per folder + # Track immediate children (files and subdirs) for each folder + from collections import defaultdict + + entries_per_folder: Dict[str, Any] = defaultdict(lambda: {"files": 0, "subdirs": set()}) + + for paths in paths_list: + path = Path(paths.path_in_repo) + parts = path.parts + + # Count this file in its immediate parent directory + parent = str(path.parent) if str(path.parent) != "." else "." + entries_per_folder[parent]["files"] += 1 + + # Track immediate subdirectories for each parent folder + # Walk through the path components to track parent-child relationships + for i, child in enumerate(parts[:-1]): + parent = "." if i == 0 else "/".join(parts[:i]) + entries_per_folder[parent]["subdirs"].add(child) + + # Check limits for each folder + for folder, data in entries_per_folder.items(): + file_count = data["files"] + subdir_count = len(data["subdirs"]) + total_entries = file_count + subdir_count + + if total_entries > MAX_FILES_PER_FOLDER: + folder_display = "root" if folder == "." else folder + logger.warning( + f"Folder '{folder_display}' contains {total_entries:,} entries " + f"({file_count:,} files and {subdir_count:,} subdirectories). 
" + f"This exceeds the recommended {MAX_FILES_PER_FOLDER:,} entries per folder.\n" + "Consider reorganising into sub-folders." + ) + + # Check 3: File sizes + large_files = [] + very_large_files = [] + + for paths in paths_list: + size = paths.file_path.stat().st_size + size_gb = size / 1_000_000_000 # Use decimal GB as per Hub limits + + if size_gb > MAX_FILE_SIZE_GB: + very_large_files.append((paths.path_in_repo, size_gb)) + elif size_gb > RECOMMENDED_FILE_SIZE_GB: + large_files.append((paths.path_in_repo, size_gb)) + + # Warn about very large files (>50GB) + if very_large_files: + files_str = "\n - ".join(f"{path}: {size:.1f}GB" for path, size in very_large_files[:5]) + more_str = f"\n ... and {len(very_large_files) - 5} more files" if len(very_large_files) > 5 else "" + logger.warning( + f"Found {len(very_large_files)} files exceeding the {MAX_FILE_SIZE_GB}GB hard limit:\n" + f" - {files_str}{more_str}\n" + f"These files may fail to upload. Consider splitting them into smaller chunks." + ) + + # Warn about large files (>20GB) + if large_files: + files_str = "\n - ".join(f"{path}: {size:.1f}GB" for path, size in large_files[:5]) + more_str = f"\n ... and {len(large_files) - 5} more files" if len(large_files) > 5 else "" + logger.warning( + f"Found {len(large_files)} files larger than {RECOMMENDED_FILE_SIZE_GB}GB (recommended limit):\n" + f" - {files_str}{more_str}\n" + f"Large files may slow down loading and processing." + ) + + logger.info("Validation checks complete.") + + +def upload_large_folder_internal( + api: "HfApi", + repo_id: str, + folder_path: Union[str, Path], + *, + repo_type: str, # Repo type is required! + revision: Optional[str] = None, + private: Optional[bool] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + num_workers: Optional[int] = None, + print_report: bool = True, + print_report_every: int = 60, +): + """Upload a large folder to the Hub in the most resilient way possible. + + See [`HfApi.upload_large_folder`] for the full documentation. + """ + # 1. Check args and setup + if repo_type is None: + raise ValueError( + "For large uploads, `repo_type` is explicitly required. Please set it to `model`, `dataset` or `space`." + " If you are using the CLI, pass it as `--repo-type=model`." + ) + if repo_type not in REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}") + if revision is None: + revision = DEFAULT_REVISION + + folder_path = Path(folder_path).expanduser().resolve() + if not folder_path.is_dir(): + raise ValueError(f"Provided path: '{folder_path}' is not a directory") + + if ignore_patterns is None: + ignore_patterns = [] + elif isinstance(ignore_patterns, str): + ignore_patterns = [ignore_patterns] + ignore_patterns += DEFAULT_IGNORE_PATTERNS + + if num_workers is None: + nb_cores = os.cpu_count() or 1 + num_workers = max(nb_cores - 2, 2) # Use all but 2 cores, or at least 2 cores + + # 2. Create repo if missing + repo_url = api.create_repo(repo_id=repo_id, repo_type=repo_type, private=private, exist_ok=True) + logger.info(f"Repo created: {repo_url}") + repo_id = repo_url.repo_id + # 2.1 Check if xet is enabled to set batch file upload size + is_xet_enabled = ( + is_xet_available() + and api.repo_info( + repo_id=repo_id, + repo_type=repo_type, + revision=revision, + expand="xetEnabled", + ).xet_enabled + ) + upload_batch_size = UPLOAD_BATCH_SIZE_XET if is_xet_enabled else UPLOAD_BATCH_SIZE_LFS + + # 3. 
List files to upload + filtered_paths_list = filter_repo_objects( + (path.relative_to(folder_path).as_posix() for path in folder_path.glob("**/*") if path.is_file()), + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + ) + paths_list = [get_local_upload_paths(folder_path, relpath) for relpath in filtered_paths_list] + logger.info(f"Found {len(paths_list)} candidate files to upload") + + # Validate upload against repository limits + _validate_upload_limits(paths_list) + + logger.info("Starting upload...") + + # Read metadata for each file + items = [ + (paths, read_upload_metadata(folder_path, paths.path_in_repo)) + for paths in tqdm(paths_list, desc="Recovering from metadata files") + ] + + # 4. Start workers + status = LargeUploadStatus(items, upload_batch_size) + threads = [ + threading.Thread( + target=_worker_job, + kwargs={ + "status": status, + "api": api, + "repo_id": repo_id, + "repo_type": repo_type, + "revision": revision, + }, + ) + for _ in range(num_workers) + ] + + for thread in threads: + thread.start() + + # 5. Print regular reports + if print_report: + print("\n\n" + status.current_report()) + last_report_ts = time.time() + while True: + time.sleep(1) + if time.time() - last_report_ts >= print_report_every: + if print_report: + _print_overwrite(status.current_report()) + last_report_ts = time.time() + if status.is_done(): + logging.info("Is done: exiting main loop") + break + + for thread in threads: + thread.join() + + logger.info(status.current_report()) + logging.info("Upload is complete!") + + +#################### +# Logic to manage workers and synchronize tasks +#################### + + +class WorkerJob(enum.Enum): + SHA256 = enum.auto() + GET_UPLOAD_MODE = enum.auto() + PREUPLOAD_LFS = enum.auto() + COMMIT = enum.auto() + WAIT = enum.auto() # if no tasks are available but we don't want to exit + + +JOB_ITEM_T = Tuple[LocalUploadFilePaths, LocalUploadFileMetadata] + + +class LargeUploadStatus: + """Contains information, queues and tasks for a large upload process.""" + + def __init__(self, items: List[JOB_ITEM_T], upload_batch_size: int = 1): + self.items = items + self.queue_sha256: "queue.Queue[JOB_ITEM_T]" = queue.Queue() + self.queue_get_upload_mode: "queue.Queue[JOB_ITEM_T]" = queue.Queue() + self.queue_preupload_lfs: "queue.Queue[JOB_ITEM_T]" = queue.Queue() + self.queue_commit: "queue.Queue[JOB_ITEM_T]" = queue.Queue() + self.lock = Lock() + + self.nb_workers_sha256: int = 0 + self.nb_workers_get_upload_mode: int = 0 + self.nb_workers_preupload_lfs: int = 0 + self.upload_batch_size: int = upload_batch_size + self.nb_workers_commit: int = 0 + self.nb_workers_waiting: int = 0 + self.last_commit_attempt: Optional[float] = None + + self._started_at = datetime.now() + self._chunk_idx: int = 1 + self._chunk_lock: Lock = Lock() + + # Setup queues + for item in self.items: + paths, metadata = item + if metadata.sha256 is None: + self.queue_sha256.put(item) + elif metadata.upload_mode is None: + self.queue_get_upload_mode.put(item) + elif metadata.upload_mode == "lfs" and not metadata.is_uploaded: + self.queue_preupload_lfs.put(item) + elif not metadata.is_committed: + self.queue_commit.put(item) + else: + logger.debug(f"Skipping file {paths.path_in_repo} (already uploaded and committed)") + + def target_chunk(self) -> int: + with self._chunk_lock: + return COMMIT_SIZE_SCALE[self._chunk_idx] + + def update_chunk(self, success: bool, nb_items: int, duration: float) -> None: + with self._chunk_lock: + if not success: + logger.warning(f"Failed to commit 
{nb_items} files at once. Will retry with fewer files in the next batch.")
+                self._chunk_idx -= 1
+            elif nb_items >= COMMIT_SIZE_SCALE[self._chunk_idx] and duration < 40:
+                logger.info(f"Successfully committed {nb_items} files at once. Increasing the limit for the next batch.")
+                self._chunk_idx += 1
+
+            self._chunk_idx = max(0, min(self._chunk_idx, len(COMMIT_SIZE_SCALE) - 1))
+
+    def current_report(self) -> str:
+        """Generate a report of the current status of the large upload."""
+        nb_hashed = 0
+        size_hashed = 0
+        nb_preuploaded = 0
+        nb_lfs = 0
+        nb_lfs_unsure = 0
+        size_preuploaded = 0
+        nb_committed = 0
+        size_committed = 0
+        total_size = 0
+        ignored_files = 0
+        total_files = 0
+
+        with self.lock:
+            for _, metadata in self.items:
+                if metadata.should_ignore:
+                    ignored_files += 1
+                    continue
+                total_size += metadata.size
+                total_files += 1
+                if metadata.sha256 is not None:
+                    nb_hashed += 1
+                    size_hashed += metadata.size
+                if metadata.upload_mode == "lfs":
+                    nb_lfs += 1
+                if metadata.upload_mode is None:
+                    nb_lfs_unsure += 1
+                if metadata.is_uploaded:
+                    nb_preuploaded += 1
+                    size_preuploaded += metadata.size
+                if metadata.is_committed:
+                    nb_committed += 1
+                    size_committed += metadata.size
+        total_size_str = _format_size(total_size)
+
+        now = datetime.now()
+        now_str = now.strftime("%Y-%m-%d %H:%M:%S")
+        elapsed = now - self._started_at
+        elapsed_str = str(elapsed).split(".")[0]  # remove milliseconds
+
+        message = "\n" + "-" * 10
+        message += f" {now_str} ({elapsed_str}) "
+        message += "-" * 10 + "\n"
+
+        message += "Files: "
+        message += f"hashed {nb_hashed}/{total_files} ({_format_size(size_hashed)}/{total_size_str}) | "
+        message += f"pre-uploaded: {nb_preuploaded}/{nb_lfs} ({_format_size(size_preuploaded)}/{total_size_str})"
+        if nb_lfs_unsure > 0:
+            message += f" (+{nb_lfs_unsure} unsure)"
+        message += f" | committed: {nb_committed}/{total_files} ({_format_size(size_committed)}/{total_size_str})"
+        message += f" | ignored: {ignored_files}\n"
+
+        message += "Workers: "
+        message += f"hashing: {self.nb_workers_sha256} | "
+        message += f"get upload mode: {self.nb_workers_get_upload_mode} | "
+        message += f"pre-uploading: {self.nb_workers_preupload_lfs} | "
+        message += f"committing: {self.nb_workers_commit} | "
+        message += f"waiting: {self.nb_workers_waiting}\n"
+        message += "-" * 51
+
+        return message
+
+    def is_done(self) -> bool:
+        with self.lock:
+            return all(metadata.is_committed or metadata.should_ignore for _, metadata in self.items)
+
+
+def _worker_job(
+    status: LargeUploadStatus,
+    api: "HfApi",
+    repo_id: str,
+    repo_type: str,
+    revision: str,
+):
+    """
+    Main process for a worker. The worker will perform tasks based on the priority list until all files are uploaded
+    and committed. If no tasks are available, the worker will wait for 10 seconds before checking again.
+
+    If a task fails for any reason, the item(s) are put back in the queue for another worker to pick up.
+
+    Read the `upload_large_folder` docstring for more information on how tasks are prioritized.
+    """
+    while True:
+        next_job: Optional[Tuple[WorkerJob, List[JOB_ITEM_T]]] = None
+
+        # Determine next task
+        next_job = _determine_next_job(status)
+        if next_job is None:
+            return
+        job, items = next_job
+
+        # Perform task
+        if job == WorkerJob.SHA256:
+            item = items[0]  # single item
+            try:
+                _compute_sha256(item)
+                status.queue_get_upload_mode.put(item)
+            except KeyboardInterrupt:
+                raise
+            except Exception as e:
+                logger.error(f"Failed to compute sha256: {e}")
+                traceback.format_exc()
+                status.queue_sha256.put(item)
+
+            with status.lock:
+                status.nb_workers_sha256 -= 1
+
+        elif job == WorkerJob.GET_UPLOAD_MODE:
+            try:
+                _get_upload_mode(items, api=api, repo_id=repo_id, repo_type=repo_type, revision=revision)
+            except KeyboardInterrupt:
+                raise
+            except Exception as e:
+                logger.error(f"Failed to get upload mode: {e}")
+                traceback.format_exc()
+
+            # Items are either:
+            # - dropped (if should_ignore)
+            # - put in LFS queue (if LFS)
+            # - put in commit queue (if regular)
+            # - or put back (if error occurred).
+            for item in items:
+                _, metadata = item
+                if metadata.should_ignore:
+                    continue
+                if metadata.upload_mode == "lfs":
+                    status.queue_preupload_lfs.put(item)
+                elif metadata.upload_mode == "regular":
+                    status.queue_commit.put(item)
+                else:
+                    status.queue_get_upload_mode.put(item)
+
+            with status.lock:
+                status.nb_workers_get_upload_mode -= 1
+
+        elif job == WorkerJob.PREUPLOAD_LFS:
+            try:
+                _preupload_lfs(items, api=api, repo_id=repo_id, repo_type=repo_type, revision=revision)
+                for item in items:
+                    status.queue_commit.put(item)
+            except KeyboardInterrupt:
+                raise
+            except Exception as e:
+                logger.error(f"Failed to preupload LFS: {e}")
+                traceback.format_exc()
+                for item in items:
+                    status.queue_preupload_lfs.put(item)
+
+            with status.lock:
+                status.nb_workers_preupload_lfs -= 1
+
+        elif job == WorkerJob.COMMIT:
+            start_ts = time.time()
+            success = True
+            try:
+                _commit(items, api=api, repo_id=repo_id, repo_type=repo_type, revision=revision)
+            except KeyboardInterrupt:
+                raise
+            except Exception as e:
+                logger.error(f"Failed to commit: {e}")
+                traceback.format_exc()
+                for item in items:
+                    status.queue_commit.put(item)
+                success = False
+            duration = time.time() - start_ts
+            status.update_chunk(success, len(items), duration)
+            with status.lock:
+                status.last_commit_attempt = time.time()
+                status.nb_workers_commit -= 1
+
+        elif job == WorkerJob.WAIT:
+            time.sleep(WAITING_TIME_IF_NO_TASKS)
+            with status.lock:
+                status.nb_workers_waiting -= 1
+
+
+def _determine_next_job(status: LargeUploadStatus) -> Optional[Tuple[WorkerJob, List[JOB_ITEM_T]]]:
+    with status.lock:
+        # 1. Commit if more than 5 minutes since last commit attempt (and at least 1 file)
+        if (
+            status.nb_workers_commit == 0
+            and status.queue_commit.qsize() > 0
+            and status.last_commit_attempt is not None
+            and time.time() - status.last_commit_attempt > 5 * 60
+        ):
+            status.nb_workers_commit += 1
+            logger.debug("Job: commit (more than 5 minutes since last commit attempt)")
+            return (WorkerJob.COMMIT, _get_n(status.queue_commit, status.target_chunk()))
+
+        # 2. Commit if at least 150 files are ready to commit
+        elif status.nb_workers_commit == 0 and status.queue_commit.qsize() >= 150:
+            status.nb_workers_commit += 1
+            logger.debug("Job: commit (>=150 files ready)")
+            return (WorkerJob.COMMIT, _get_n(status.queue_commit, status.target_chunk()))
+
+        # 3. Get upload mode if at least 100 files
+        elif status.queue_get_upload_mode.qsize() >= MAX_NB_FILES_FETCH_UPLOAD_MODE:
+            status.nb_workers_get_upload_mode += 1
+            logger.debug(f"Job: get upload mode (>{MAX_NB_FILES_FETCH_UPLOAD_MODE} files ready)")
+            return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, MAX_NB_FILES_FETCH_UPLOAD_MODE))
+
+        # 4. Preupload LFS file if at least `status.upload_batch_size` files and no worker is preuploading LFS
+        elif status.queue_preupload_lfs.qsize() >= status.upload_batch_size and status.nb_workers_preupload_lfs == 0:
+            status.nb_workers_preupload_lfs += 1
+            logger.debug("Job: preupload LFS (no other worker preuploading LFS)")
+            return (WorkerJob.PREUPLOAD_LFS, _get_n(status.queue_preupload_lfs, status.upload_batch_size))
+
+        # 5. Compute sha256 if at least 1 file and no worker is computing sha256
+        elif status.queue_sha256.qsize() > 0 and status.nb_workers_sha256 == 0:
+            status.nb_workers_sha256 += 1
+            logger.debug("Job: sha256 (no other worker computing sha256)")
+            return (WorkerJob.SHA256, _get_one(status.queue_sha256))
+
+        # 6. Get upload mode if at least 1 file and no worker is getting upload mode
+        elif status.queue_get_upload_mode.qsize() > 0 and status.nb_workers_get_upload_mode == 0:
+            status.nb_workers_get_upload_mode += 1
+            logger.debug("Job: get upload mode (no other worker getting upload mode)")
+            return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, MAX_NB_FILES_FETCH_UPLOAD_MODE))
+
+        # 7. Preupload LFS file if at least `status.upload_batch_size` files
+        #    Skip if hf_transfer is enabled and there is already a worker preuploading LFS
+        elif status.queue_preupload_lfs.qsize() >= status.upload_batch_size and (
+            status.nb_workers_preupload_lfs == 0 or not constants.HF_HUB_ENABLE_HF_TRANSFER
+        ):
+            status.nb_workers_preupload_lfs += 1
+            logger.debug("Job: preupload LFS")
+            return (WorkerJob.PREUPLOAD_LFS, _get_n(status.queue_preupload_lfs, status.upload_batch_size))
+
+        # 8. Compute sha256 if at least 1 file
+        elif status.queue_sha256.qsize() > 0:
+            status.nb_workers_sha256 += 1
+            logger.debug("Job: sha256")
+            return (WorkerJob.SHA256, _get_one(status.queue_sha256))
+
+        # 9. Get upload mode if at least 1 file
+        elif status.queue_get_upload_mode.qsize() > 0:
+            status.nb_workers_get_upload_mode += 1
+            logger.debug("Job: get upload mode")
+            return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, MAX_NB_FILES_FETCH_UPLOAD_MODE))
+
+        # 10. Preupload LFS file if at least 1 file
+        elif status.queue_preupload_lfs.qsize() > 0:
+            status.nb_workers_preupload_lfs += 1
+            logger.debug("Job: preupload LFS")
+            return (WorkerJob.PREUPLOAD_LFS, _get_n(status.queue_preupload_lfs, status.upload_batch_size))
+
+        # 11. Commit if at least 1 file and 1 min since last commit attempt
+        elif (
+            status.nb_workers_commit == 0
+            and status.queue_commit.qsize() > 0
+            and status.last_commit_attempt is not None
+            and time.time() - status.last_commit_attempt > 1 * 60
+        ):
+            status.nb_workers_commit += 1
+            logger.debug("Job: commit (1 min since last commit attempt)")
+            return (WorkerJob.COMMIT, _get_n(status.queue_commit, status.target_chunk()))
+
+        # 12. Commit if at least 1 file, all other queues are empty, and all workers are waiting
+        # e.g.
when it's the last commit + elif ( + status.nb_workers_commit == 0 + and status.queue_commit.qsize() > 0 + and status.queue_sha256.qsize() == 0 + and status.queue_get_upload_mode.qsize() == 0 + and status.queue_preupload_lfs.qsize() == 0 + and status.nb_workers_sha256 == 0 + and status.nb_workers_get_upload_mode == 0 + and status.nb_workers_preupload_lfs == 0 + ): + status.nb_workers_commit += 1 + logger.debug("Job: commit") + return (WorkerJob.COMMIT, _get_n(status.queue_commit, status.target_chunk())) + + # 13. If all queues are empty, exit + elif all(metadata.is_committed or metadata.should_ignore for _, metadata in status.items): + logger.info("All files have been processed! Exiting worker.") + return None + + # 14. If no task is available, wait + else: + status.nb_workers_waiting += 1 + logger.debug(f"No task available, waiting... ({WAITING_TIME_IF_NO_TASKS}s)") + return (WorkerJob.WAIT, []) + + +#################### +# Atomic jobs (sha256, get_upload_mode, preupload_lfs, commit) +#################### + + +def _compute_sha256(item: JOB_ITEM_T) -> None: + """Compute sha256 of a file and save it in metadata.""" + paths, metadata = item + if metadata.sha256 is None: + with paths.file_path.open("rb") as f: + metadata.sha256 = sha_fileobj(f).hex() + metadata.save(paths) + + +def _get_upload_mode(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None: + """Get upload mode for each file and update metadata. + + Also receive info if the file should be ignored. + """ + additions = [_build_hacky_operation(item) for item in items] + _fetch_upload_modes( + additions=additions, + repo_type=repo_type, + repo_id=repo_id, + headers=api._build_hf_headers(), + revision=quote(revision, safe=""), + endpoint=api.endpoint, + ) + for item, addition in zip(items, additions): + paths, metadata = item + metadata.upload_mode = addition._upload_mode + metadata.should_ignore = addition._should_ignore + metadata.remote_oid = addition._remote_oid + metadata.save(paths) + + +def _preupload_lfs(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None: + """Preupload LFS files and update metadata.""" + additions = [_build_hacky_operation(item) for item in items] + api.preupload_lfs_files( + repo_id=repo_id, + repo_type=repo_type, + revision=revision, + additions=additions, + ) + + for paths, metadata in items: + metadata.is_uploaded = True + metadata.save(paths) + + +def _commit(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None: + """Commit files to the repo.""" + additions = [_build_hacky_operation(item) for item in items] + api.create_commit( + repo_id=repo_id, + repo_type=repo_type, + revision=revision, + operations=additions, + commit_message="Add files using upload-large-folder tool", + ) + for paths, metadata in items: + metadata.is_committed = True + metadata.save(paths) + + +#################### +# Hacks with CommitOperationAdd to bypass checks/sha256 calculation +#################### + + +class HackyCommitOperationAdd(CommitOperationAdd): + def __post_init__(self) -> None: + if isinstance(self.path_or_fileobj, Path): + self.path_or_fileobj = str(self.path_or_fileobj) + + +def _build_hacky_operation(item: JOB_ITEM_T) -> HackyCommitOperationAdd: + paths, metadata = item + operation = HackyCommitOperationAdd(path_in_repo=paths.path_in_repo, path_or_fileobj=paths.file_path) + with paths.file_path.open("rb") as file: + sample = file.peek(512)[:512] + if metadata.sha256 is None: + raise 
ValueError("sha256 must have been computed by now!")
+    operation.upload_info = UploadInfo(sha256=bytes.fromhex(metadata.sha256), size=metadata.size, sample=sample)
+    operation._upload_mode = metadata.upload_mode  # type: ignore[assignment]
+    operation._should_ignore = metadata.should_ignore
+    operation._remote_oid = metadata.remote_oid
+    return operation
+
+
+####################
+# Misc helpers
+####################
+
+
+def _get_one(queue: "queue.Queue[JOB_ITEM_T]") -> List[JOB_ITEM_T]:
+    return [queue.get()]
+
+
+def _get_n(queue: "queue.Queue[JOB_ITEM_T]", n: int) -> List[JOB_ITEM_T]:
+    return [queue.get() for _ in range(min(queue.qsize(), n))]
+
+
+def _print_overwrite(report: str) -> None:
+    """Print a report, overwriting the previous lines.
+
+    Since tqdm is using `sys.stderr` to (re-)write progress bars, we need to use `sys.stdout`
+    to print the report.
+
+    Note: works well only if no other process is writing to `sys.stdout`!
+    """
+    report += "\n"
+    # Get terminal width
+    terminal_width = shutil.get_terminal_size().columns
+
+    # Count number of lines that should be cleared
+    nb_lines = sum(len(line) // terminal_width + 1 for line in report.splitlines())
+
+    # Clear previous lines based on the number of lines in the report
+    for _ in range(nb_lines):
+        sys.stdout.write("\r\033[K")  # Clear line
+        sys.stdout.write("\033[F")  # Move cursor up one line
+
+    # Print the new report, filling remaining space with whitespace
+    sys.stdout.write(report)
+    sys.stdout.write(" " * (terminal_width - len(report.splitlines()[-1])))
+    sys.stdout.flush()
diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_webhooks_payload.py b/venv/lib/python3.13/site-packages/huggingface_hub/_webhooks_payload.py
new file mode 100644
index 0000000000000000000000000000000000000000..288f4b08b9428980e99ca06703442eab62fad277
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/huggingface_hub/_webhooks_payload.py
@@ -0,0 +1,137 @@
+# coding=utf-8
+# Copyright 2023-present, the HuggingFace Inc. team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains data structures to parse the webhooks payload."""
+
+from typing import List, Literal, Optional
+
+from .utils import is_pydantic_available
+
+
+if is_pydantic_available():
+    from pydantic import BaseModel
+else:
+    # Define a dummy BaseModel to avoid import errors when pydantic is not installed
+    # Import error will be raised when trying to use the class
+
+    class BaseModel:  # type: ignore [no-redef]
+        def __init__(self, *args, **kwargs) -> None:
+            raise ImportError(
+                "You must have `pydantic` installed to use `WebhookPayload`. This is an optional dependency that"
+                " should be installed separately. Please run `pip install --upgrade pydantic` and retry."
+            )
+
+
+# This is an adaptation of the ReportV3 interface implemented in moon-landing. V0, V1 and V2 have been ignored as they
+# are not in use anymore. To be kept in sync when the format is updated in
+# https://github.com/huggingface/moon-landing/blob/main/server/lib/HFWebhooks.ts (internal link).
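+# Illustrative sketch (comment only): turning a decoded webhook request body into the typed model
+# defined below. `raw_body` is a placeholder for the bytes received by your server.
+#
+#   import json
+#   from huggingface_hub import WebhookPayload
+#
+#   payload = WebhookPayload.parse_obj(json.loads(raw_body))  # pydantic v1-style; use `model_validate` on pydantic v2
+#   print(payload.event.action, payload.event.scope)
+#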
+ + +WebhookEvent_T = Literal[ + "create", + "delete", + "move", + "update", +] +RepoChangeEvent_T = Literal[ + "add", + "move", + "remove", + "update", +] +RepoType_T = Literal[ + "dataset", + "model", + "space", +] +DiscussionStatus_T = Literal[ + "closed", + "draft", + "open", + "merged", +] +SupportedWebhookVersion = Literal[3] + + +class ObjectId(BaseModel): + id: str + + +class WebhookPayloadUrl(BaseModel): + web: str + api: Optional[str] = None + + +class WebhookPayloadMovedTo(BaseModel): + name: str + owner: ObjectId + + +class WebhookPayloadWebhook(ObjectId): + version: SupportedWebhookVersion + + +class WebhookPayloadEvent(BaseModel): + action: WebhookEvent_T + scope: str + + +class WebhookPayloadDiscussionChanges(BaseModel): + base: str + mergeCommitId: Optional[str] = None + + +class WebhookPayloadComment(ObjectId): + author: ObjectId + hidden: bool + content: Optional[str] = None + url: WebhookPayloadUrl + + +class WebhookPayloadDiscussion(ObjectId): + num: int + author: ObjectId + url: WebhookPayloadUrl + title: str + isPullRequest: bool + status: DiscussionStatus_T + changes: Optional[WebhookPayloadDiscussionChanges] = None + pinned: Optional[bool] = None + + +class WebhookPayloadRepo(ObjectId): + owner: ObjectId + head_sha: Optional[str] = None + name: str + private: bool + subdomain: Optional[str] = None + tags: Optional[List[str]] = None + type: Literal["dataset", "model", "space"] + url: WebhookPayloadUrl + + +class WebhookPayloadUpdatedRef(BaseModel): + ref: str + oldSha: Optional[str] = None + newSha: Optional[str] = None + + +class WebhookPayload(BaseModel): + event: WebhookPayloadEvent + repo: WebhookPayloadRepo + discussion: Optional[WebhookPayloadDiscussion] = None + comment: Optional[WebhookPayloadComment] = None + webhook: WebhookPayloadWebhook + movedTo: Optional[WebhookPayloadMovedTo] = None + updatedRefs: Optional[List[WebhookPayloadUpdatedRef]] = None diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/_webhooks_server.py b/venv/lib/python3.13/site-packages/huggingface_hub/_webhooks_server.py new file mode 100644 index 0000000000000000000000000000000000000000..a3668304553e13f9605a59ec623aceb5202a2488 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/_webhooks_server.py @@ -0,0 +1,376 @@ +# coding=utf-8 +# Copyright 2023-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
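Before the server implementation that follows, it is worth noting the request contract it enforces: when a webhook secret is configured, every incoming request must carry it in the `x-webhook-secret` header (a missing header yields a 401, a wrong one a 403, as implemented in `_wrap_webhook_to_check_secret` further down). A client-side sketch, with a placeholder URL and secret:

```python
# Sketch of calling a secret-protected endpoint registered under /webhooks.
# The URL and secret below are placeholders for illustration.
import requests

response = requests.post(
    "https://my-user-my-space.hf.space/webhooks/say_hello",  # hypothetical Space URL
    json={"event": {"action": "update", "scope": "repo.content"}},  # minimal body
    headers={"x-webhook-secret": "my_secret_key"},  # must match WEBHOOK_SECRET
)
response.raise_for_status()
print(response.json())
```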
+"""Contains `WebhooksServer` and `webhook_endpoint` to create a webhook server easily.""" + +import atexit +import inspect +import os +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Dict, Optional + +from .utils import experimental, is_fastapi_available, is_gradio_available + + +if TYPE_CHECKING: + import gradio as gr + from fastapi import Request + +if is_fastapi_available(): + from fastapi import FastAPI, Request + from fastapi.responses import JSONResponse +else: + # Will fail at runtime if FastAPI is not available + FastAPI = Request = JSONResponse = None # type: ignore + + +_global_app: Optional["WebhooksServer"] = None +_is_local = os.environ.get("SPACE_ID") is None + + +@experimental +class WebhooksServer: + """ + The [`WebhooksServer`] class lets you create an instance of a Gradio app that can receive Huggingface webhooks. + These webhooks can be registered using the [`~WebhooksServer.add_webhook`] decorator. Webhook endpoints are added to + the app as a POST endpoint to the FastAPI router. Once all the webhooks are registered, the `launch` method has to be + called to start the app. + + It is recommended to accept [`WebhookPayload`] as the first argument of the webhook function. It is a Pydantic + model that contains all the information about the webhook event. The data will be parsed automatically for you. + + Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your + WebhooksServer and deploy it on a Space. + + > [!WARNING] + > `WebhooksServer` is experimental. Its API is subject to change in the future. + + > [!WARNING] + > You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`). + + Args: + ui (`gradio.Blocks`, optional): + A Gradio UI instance to be used as the Space landing page. If `None`, a UI displaying instructions + about the configured webhooks is created. + webhook_secret (`str`, optional): + A secret key to verify incoming webhook requests. You can set this value to any secret you want as long as + you also configure it in your [webhooks settings panel](https://huggingface.co/settings/webhooks). You + can also set this value as the `WEBHOOK_SECRET` environment variable. If no secret is provided, the + webhook endpoints are opened without any security. + + Example: + + ```python + import gradio as gr + from huggingface_hub import WebhooksServer, WebhookPayload + + with gr.Blocks() as ui: + ... + + app = WebhooksServer(ui=ui, webhook_secret="my_secret_key") + + @app.add_webhook("/say_hello") + async def hello(payload: WebhookPayload): + return {"message": "hello"} + + app.launch() + ``` + """ + + def __new__(cls, *args, **kwargs) -> "WebhooksServer": + if not is_gradio_available(): + raise ImportError( + "You must have `gradio` installed to use `WebhooksServer`. Please run `pip install --upgrade gradio`" + " first." + ) + if not is_fastapi_available(): + raise ImportError( + "You must have `fastapi` installed to use `WebhooksServer`. Please run `pip install --upgrade fastapi`" + " first." + ) + return super().__new__(cls) + + def __init__( + self, + ui: Optional["gr.Blocks"] = None, + webhook_secret: Optional[str] = None, + ) -> None: + self._ui = ui + + self.webhook_secret = webhook_secret or os.getenv("WEBHOOK_SECRET") + self.registered_webhooks: Dict[str, Callable] = {} + _warn_on_empty_secret(self.webhook_secret) + + def add_webhook(self, path: Optional[str] = None) -> Callable: + """ + Decorator to add a webhook to the [`WebhooksServer`] server. 
+ + Args: + path (`str`, optional): + The URL path to register the webhook function. If not provided, the function name will be used as the + path. In any case, all webhooks are registered under `/webhooks`. + + Raises: + ValueError: If the provided path is already registered as a webhook. + + Example: + ```python + from huggingface_hub import WebhooksServer, WebhookPayload + + app = WebhooksServer() + + @app.add_webhook + async def trigger_training(payload: WebhookPayload): + if payload.repo.type == "dataset" and payload.event.action == "update": + # Trigger a training job if a dataset is updated + ... + + app.launch() + ``` + """ + # Usage: directly as decorator. Example: `@app.add_webhook` + if callable(path): + # If path is a function, it means it was used as a decorator without arguments + return self.add_webhook()(path) + + # Usage: provide a path. Example: `@app.add_webhook(...)` + @wraps(FastAPI.post) + def _inner_post(*args, **kwargs): + func = args[0] + abs_path = f"/webhooks/{(path or func.__name__).strip('/')}" + if abs_path in self.registered_webhooks: + raise ValueError(f"Webhook {abs_path} already exists.") + self.registered_webhooks[abs_path] = func + + return _inner_post + + def launch(self, prevent_thread_lock: bool = False, **launch_kwargs: Any) -> None: + """Launch the Gradio app and register webhooks to the underlying FastAPI server. + + Input parameters are forwarded to Gradio when launching the app. + """ + ui = self._ui or self._get_default_ui() + + # Start Gradio App + # - as non-blocking so that webhooks can be added afterwards + # - as shared if launch locally (to debug webhooks) + launch_kwargs.setdefault("share", _is_local) + self.fastapi_app, _, _ = ui.launch(prevent_thread_lock=True, **launch_kwargs) + + # Register webhooks to FastAPI app + for path, func in self.registered_webhooks.items(): + # Add secret check if required + if self.webhook_secret is not None: + func = _wrap_webhook_to_check_secret(func, webhook_secret=self.webhook_secret) + + # Add route to FastAPI app + self.fastapi_app.post(path)(func) + + # Print instructions and block main thread + space_host = os.environ.get("SPACE_HOST") + url = "https://" + space_host if space_host is not None else (ui.share_url or ui.local_url) + if url is None: + raise ValueError("Cannot find the URL of the app. Please provide a valid `ui` or update `gradio` version.") + url = url.strip("/") + message = "\nWebhooks are correctly setup and ready to use:" + message += "\n" + "\n".join(f" - POST {url}{webhook}" for webhook in self.registered_webhooks) + message += "\nGo to https://huggingface.co/settings/webhooks to setup your webhooks." + print(message) + + if not prevent_thread_lock: + ui.block_thread() + + def _get_default_ui(self) -> "gr.Blocks": + """Default UI if not provided (lists webhooks and provides basic instructions).""" + import gradio as gr + + with gr.Blocks() as ui: + gr.Markdown("# This is an app to process 🤗 Webhooks") + gr.Markdown( + "Webhooks are a foundation for MLOps-related features. They allow you to listen for new changes on" + " specific repos or to all repos belonging to particular set of users/organizations (not just your" + " repos, but any repo). Check out this [guide](https://huggingface.co/docs/hub/webhooks) to get to" + " know more about webhooks on the Huggingface Hub." 
+ ) + gr.Markdown( + f"{len(self.registered_webhooks)} webhook(s) are registered:" + + "\n\n" + + "\n ".join( + f"- [{webhook_path}]({_get_webhook_doc_url(webhook.__name__, webhook_path)})" + for webhook_path, webhook in self.registered_webhooks.items() + ) + ) + gr.Markdown( + "Go to https://huggingface.co/settings/webhooks to set up your webhooks." + + "\nYour app is running locally. Please look at the logs to check the full URL you need to set." + if _is_local + else ( + "\nThis app is running on a Space. You can find the corresponding URL in the options menu" + " (top-right) > 'Embed the Space'. The URL looks like 'https://{username}-{repo_name}.hf.space'." + ) + ) + return ui + + +@experimental +def webhook_endpoint(path: Optional[str] = None) -> Callable: + """Decorator to start a [`WebhooksServer`] and register the decorated function as a webhook endpoint. + + This is a helper to get started quickly. If you need more flexibility (custom landing page or webhook secret), + you can use [`WebhooksServer`] directly. You can register multiple webhook endpoints (to the same server) by using + this decorator multiple times. + + Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to set up your + server and deploy it on a Space. + + > [!WARNING] + > `webhook_endpoint` is experimental. Its API is subject to change in the future. + + > [!WARNING] + > You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`). + + Args: + path (`str`, optional): + The URL path to register the webhook function. If not provided, the function name will be used as the path. + In any case, all webhooks are registered under `/webhooks`. + + Examples: + The default usage is to register a function as a webhook endpoint. The function name will be used as the path. + The server will be started automatically at exit (i.e. at the end of the script). + + ```python + from huggingface_hub import webhook_endpoint, WebhookPayload + + @webhook_endpoint + async def trigger_training(payload: WebhookPayload): + if payload.repo.type == "dataset" and payload.event.action == "update": + # Trigger a training job if a dataset is updated + ... + + # Server is automatically started at the end of the script. + ``` + + Advanced usage: register a function as a webhook endpoint and start the server manually. This is useful if you + are running it in a notebook. + + ```python + from huggingface_hub import webhook_endpoint, WebhookPayload + + @webhook_endpoint + async def trigger_training(payload: WebhookPayload): + if payload.repo.type == "dataset" and payload.event.action == "update": + # Trigger a training job if a dataset is updated + ...
+ + # Start the server manually + trigger_training.launch() + ``` + """ + if callable(path): + # If path is a function, it means it was used as a decorator without arguments + return webhook_endpoint()(path) + + @wraps(WebhooksServer.add_webhook) + def _inner(func: Callable) -> Callable: + app = _get_global_app() + app.add_webhook(path)(func) + if len(app.registered_webhooks) == 1: + # Register `app.launch` to run at exit (only once) + atexit.register(app.launch) + + @wraps(app.launch) + def _launch_now(): + # Run the app directly (without waiting atexit) + atexit.unregister(app.launch) + app.launch() + + func.launch = _launch_now # type: ignore + return func + + return _inner + + +def _get_global_app() -> WebhooksServer: + global _global_app + if _global_app is None: + _global_app = WebhooksServer() + return _global_app + + +def _warn_on_empty_secret(webhook_secret: Optional[str]) -> None: + if webhook_secret is None: + print("Webhook secret is not defined. This means your webhook endpoints will be open to everyone.") + print( + "To add a secret, set `WEBHOOK_SECRET` as environment variable or pass it at initialization: " + "\n\t`app = WebhooksServer(webhook_secret='my_secret', ...)`" + ) + print( + "For more details about webhook secrets, please refer to" + " https://huggingface.co/docs/hub/webhooks#webhook-secret." + ) + else: + print("Webhook secret is correctly defined.") + + +def _get_webhook_doc_url(webhook_name: str, webhook_path: str) -> str: + """Returns the anchor to a given webhook in the docs (experimental)""" + return "/docs#/default/" + webhook_name + webhook_path.replace("/", "_") + "_post" + + +def _wrap_webhook_to_check_secret(func: Callable, webhook_secret: str) -> Callable: + """Wraps a webhook function to check the webhook secret before calling the function. + + This is a hacky way to add the `request` parameter to the function signature. Since FastAPI based itself on route + parameters to inject the values to the function, we need to hack the function signature to retrieve the `Request` + object (and hence the headers). A far cleaner solution would be to use a middleware. However, since + `fastapi==0.90.1`, a middleware cannot be added once the app has started. And since the FastAPI app is started by + Gradio internals (and not by us), we cannot add a middleware. + + This method is called only when a secret has been defined by the user. If a request is sent without the + "x-webhook-secret", the function will return a 401 error (unauthorized). If the header is sent but is incorrect, + the function will return a 403 error (forbidden). + + Inspired by https://stackoverflow.com/a/33112180. 
+ """ + initial_sig = inspect.signature(func) + + @wraps(func) + async def _protected_func(request: Request, **kwargs): + request_secret = request.headers.get("x-webhook-secret") + if request_secret is None: + return JSONResponse({"error": "x-webhook-secret header not set."}, status_code=401) + if request_secret != webhook_secret: + return JSONResponse({"error": "Invalid webhook secret."}, status_code=403) + + # Inject `request` in kwargs if required + if "request" in initial_sig.parameters: + kwargs["request"] = request + + # Handle both sync and async routes + if inspect.iscoroutinefunction(func): + return await func(**kwargs) + else: + return func(**kwargs) + + # Update signature to include request + if "request" not in initial_sig.parameters: + _protected_func.__signature__ = initial_sig.replace( # type: ignore + parameters=( + inspect.Parameter(name="request", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=Request), + ) + + tuple(initial_sig.parameters.values()) + ) + + # Return protected route + return _protected_func diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/community.py b/venv/lib/python3.13/site-packages/huggingface_hub/community.py new file mode 100644 index 0000000000000000000000000000000000000000..ffaab355174689b1dfb5b1c95f06fc088859d4cf --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/community.py @@ -0,0 +1,363 @@ +""" +Data structures to interact with Discussions and Pull Requests on the Hub. + +See [the Discussions and Pull Requests guide](https://huggingface.co/docs/hub/repositories-pull-requests-discussions) +for more information on Pull Requests, Discussions, and the community tab. +""" + +from dataclasses import dataclass +from datetime import datetime +from typing import List, Literal, Optional, TypedDict, Union + +from . import constants +from .utils import parse_datetime + + +DiscussionStatus = Literal["open", "closed", "merged", "draft"] + + +@dataclass +class Discussion: + """ + A Discussion or Pull Request on the Hub. + + This dataclass is not intended to be instantiated directly. + + Attributes: + title (`str`): + The title of the Discussion / Pull Request + status (`str`): + The status of the Discussion / Pull Request. + It must be one of: + * `"open"` + * `"closed"` + * `"merged"` (only for Pull Requests ) + * `"draft"` (only for Pull Requests ) + num (`int`): + The number of the Discussion / Pull Request. + repo_id (`str`): + The id (`"{namespace}/{repo_name}"`) of the repo on which + the Discussion / Pull Request was open. + repo_type (`str`): + The type of the repo on which the Discussion / Pull Request was open. + Possible values are: `"model"`, `"dataset"`, `"space"`. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + is_pull_request (`bool`): + Whether or not this is a Pull Request. + created_at (`datetime`): + The `datetime` of creation of the Discussion / Pull Request. + endpoint (`str`): + Endpoint of the Hub. Default is https://huggingface.co. + git_reference (`str`, *optional*): + (property) Git reference to which changes can be pushed if this is a Pull Request, `None` otherwise. + url (`str`): + (property) URL of the discussion on the Hub. 
+ """ + + title: str + status: DiscussionStatus + num: int + repo_id: str + repo_type: str + author: str + is_pull_request: bool + created_at: datetime + endpoint: str + + @property + def git_reference(self) -> Optional[str]: + """ + If this is a Pull Request , returns the git reference to which changes can be pushed. + Returns `None` otherwise. + """ + if self.is_pull_request: + return f"refs/pr/{self.num}" + return None + + @property + def url(self) -> str: + """Returns the URL of the discussion on the Hub.""" + if self.repo_type is None or self.repo_type == constants.REPO_TYPE_MODEL: + return f"{self.endpoint}/{self.repo_id}/discussions/{self.num}" + return f"{self.endpoint}/{self.repo_type}s/{self.repo_id}/discussions/{self.num}" + + +@dataclass +class DiscussionWithDetails(Discussion): + """ + Subclass of [`Discussion`]. + + Attributes: + title (`str`): + The title of the Discussion / Pull Request + status (`str`): + The status of the Discussion / Pull Request. + It can be one of: + * `"open"` + * `"closed"` + * `"merged"` (only for Pull Requests ) + * `"draft"` (only for Pull Requests ) + num (`int`): + The number of the Discussion / Pull Request. + repo_id (`str`): + The id (`"{namespace}/{repo_name}"`) of the repo on which + the Discussion / Pull Request was open. + repo_type (`str`): + The type of the repo on which the Discussion / Pull Request was open. + Possible values are: `"model"`, `"dataset"`, `"space"`. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + is_pull_request (`bool`): + Whether or not this is a Pull Request. + created_at (`datetime`): + The `datetime` of creation of the Discussion / Pull Request. + events (`list` of [`DiscussionEvent`]) + The list of [`DiscussionEvents`] in this Discussion or Pull Request. + conflicting_files (`Union[List[str], bool, None]`, *optional*): + A list of conflicting files if this is a Pull Request. + `None` if `self.is_pull_request` is `False`. + `True` if there are conflicting files but the list can't be retrieved. + target_branch (`str`, *optional*): + The branch into which changes are to be merged if this is a + Pull Request . `None` if `self.is_pull_request` is `False`. + merge_commit_oid (`str`, *optional*): + If this is a merged Pull Request , this is set to the OID / SHA of + the merge commit, `None` otherwise. + diff (`str`, *optional*): + The git diff if this is a Pull Request , `None` otherwise. + endpoint (`str`): + Endpoint of the Hub. Default is https://huggingface.co. + git_reference (`str`, *optional*): + (property) Git reference to which changes can be pushed if this is a Pull Request, `None` otherwise. + url (`str`): + (property) URL of the discussion on the Hub. + """ + + events: List["DiscussionEvent"] + conflicting_files: Union[List[str], bool, None] + target_branch: Optional[str] + merge_commit_oid: Optional[str] + diff: Optional[str] + + +class DiscussionEventArgs(TypedDict): + id: str + type: str + created_at: datetime + author: str + _event: dict + + +@dataclass +class DiscussionEvent: + """ + An event in a Discussion or Pull Request. + + Use concrete classes: + * [`DiscussionComment`] + * [`DiscussionStatusChange`] + * [`DiscussionCommit`] + * [`DiscussionTitleChange`] + + Attributes: + id (`str`): + The ID of the event. An hexadecimal string. + type (`str`): + The type of the event. 
created_at (`datetime`): + A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) + object holding the creation timestamp for the event. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + """ + + id: str + type: str + created_at: datetime + author: str + + _event: dict + """Stores the original event data, in case we need to access it later.""" + + +@dataclass +class DiscussionComment(DiscussionEvent): + """A comment in a Discussion / Pull Request. + + Subclass of [`DiscussionEvent`]. + + + Attributes: + id (`str`): + The ID of the event. A hexadecimal string. + type (`str`): + The type of the event. + created_at (`datetime`): + A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) + object holding the creation timestamp for the event. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + content (`str`): + The raw markdown content of the comment. Mentions, links and images are not rendered. + edited (`bool`): + Whether or not this comment has been edited. + hidden (`bool`): + Whether or not this comment has been hidden. + """ + + content: str + edited: bool + hidden: bool + + @property + def rendered(self) -> str: + """The rendered comment, as an HTML string""" + return self._event["data"]["latest"]["html"] + + @property + def last_edited_at(self) -> datetime: + """The last edit time, as a `datetime` object.""" + return parse_datetime(self._event["data"]["latest"]["updatedAt"]) + + @property + def last_edited_by(self) -> str: + """The username of the user who last edited the comment.""" + return self._event["data"]["latest"].get("author", {}).get("name", "deleted") + + @property + def edit_history(self) -> List[dict]: + """The edit history of the comment""" + return self._event["data"]["history"] + + @property + def number_of_edits(self) -> int: + return len(self.edit_history) + + +@dataclass +class DiscussionStatusChange(DiscussionEvent): + """A change of status in a Discussion / Pull Request. + + Subclass of [`DiscussionEvent`]. + + Attributes: + id (`str`): + The ID of the event. A hexadecimal string. + type (`str`): + The type of the event. + created_at (`datetime`): + A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) + object holding the creation timestamp for the event. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + new_status (`str`): + The status of the Discussion / Pull Request after the change. + It can be one of: + * `"open"` + * `"closed"` + * `"merged"` (only for Pull Requests) + """ + + new_status: str + + +@dataclass +class DiscussionCommit(DiscussionEvent): + """A commit in a Pull Request. + + Subclass of [`DiscussionEvent`]. + + Attributes: + id (`str`): + The ID of the event. A hexadecimal string. + type (`str`): + The type of the event. + created_at (`datetime`): + A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) + object holding the creation timestamp for the event. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + summary (`str`): + The summary of the commit. + oid (`str`): + The OID / SHA of the commit, as a hexadecimal string.
+ """ + + summary: str + oid: str + + +@dataclass +class DiscussionTitleChange(DiscussionEvent): + """A rename event in a Discussion / Pull Request. + + Subclass of [`DiscussionEvent`]. + + Attributes: + id (`str`): + The ID of the event. An hexadecimal string. + type (`str`): + The type of the event. + created_at (`datetime`): + A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) + object holding the creation timestamp for the event. + author (`str`): + The username of the Discussion / Pull Request author. + Can be `"deleted"` if the user has been deleted since. + old_title (`str`): + The previous title for the Discussion / Pull Request. + new_title (`str`): + The new title. + """ + + old_title: str + new_title: str + + +def deserialize_event(event: dict) -> DiscussionEvent: + """Instantiates a [`DiscussionEvent`] from a dict""" + event_id: str = event["id"] + event_type: str = event["type"] + created_at = parse_datetime(event["createdAt"]) + + common_args: DiscussionEventArgs = { + "id": event_id, + "type": event_type, + "created_at": created_at, + "author": event.get("author", {}).get("name", "deleted"), + "_event": event, + } + + if event_type == "comment": + return DiscussionComment( + **common_args, + edited=event["data"]["edited"], + hidden=event["data"]["hidden"], + content=event["data"]["latest"]["raw"], + ) + if event_type == "status-change": + return DiscussionStatusChange( + **common_args, + new_status=event["data"]["status"], + ) + if event_type == "commit": + return DiscussionCommit( + **common_args, + summary=event["data"]["subject"], + oid=event["data"]["oid"], + ) + if event_type == "title-change": + return DiscussionTitleChange( + **common_args, + old_title=event["data"]["from"], + new_title=event["data"]["to"], + ) + + return DiscussionEvent(**common_args) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/constants.py b/venv/lib/python3.13/site-packages/huggingface_hub/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..b30b2c01d99c5ee5428875f3711227024f5d0829 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/constants.py @@ -0,0 +1,294 @@ +import os +import re +import typing +from typing import Literal, Optional, Tuple + + +# Possible values for env variables + + +ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"} +ENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({"AUTO"}) + + +def _is_true(value: Optional[str]) -> bool: + if value is None: + return False + return value.upper() in ENV_VARS_TRUE_VALUES + + +def _as_int(value: Optional[str]) -> Optional[int]: + if value is None: + return None + return int(value) + + +# Constants for file downloads + +PYTORCH_WEIGHTS_NAME = "pytorch_model.bin" +TF2_WEIGHTS_NAME = "tf_model.h5" +TF_WEIGHTS_NAME = "model.ckpt" +FLAX_WEIGHTS_NAME = "flax_model.msgpack" +CONFIG_NAME = "config.json" +REPOCARD_NAME = "README.md" +DEFAULT_ETAG_TIMEOUT = 10 +DEFAULT_DOWNLOAD_TIMEOUT = 10 +DEFAULT_REQUEST_TIMEOUT = 10 +DOWNLOAD_CHUNK_SIZE = 10 * 1024 * 1024 +HF_TRANSFER_CONCURRENCY = 100 +MAX_HTTP_DOWNLOAD_SIZE = 50 * 1000 * 1000 * 1000 # 50 GB + +# Constants for serialization + +PYTORCH_WEIGHTS_FILE_PATTERN = "pytorch_model{suffix}.bin" # Unsafe pickle: use safetensors instead +SAFETENSORS_WEIGHTS_FILE_PATTERN = "model{suffix}.safetensors" +TF2_WEIGHTS_FILE_PATTERN = "tf_model{suffix}.h5" + +# Constants for safetensors repos + +SAFETENSORS_SINGLE_FILE = "model.safetensors" +SAFETENSORS_INDEX_FILE = "model.safetensors.index.json" 
+SAFETENSORS_MAX_HEADER_LENGTH = 25_000_000 + +# Timeout of aquiring file lock and logging the attempt +FILELOCK_LOG_EVERY_SECONDS = 10 + +# Git-related constants + +DEFAULT_REVISION = "main" +REGEX_COMMIT_OID = re.compile(r"[A-Fa-f0-9]{5,40}") + +HUGGINGFACE_CO_URL_HOME = "https://huggingface.co/" + +_staging_mode = _is_true(os.environ.get("HUGGINGFACE_CO_STAGING")) + +_HF_DEFAULT_ENDPOINT = "https://huggingface.co" +_HF_DEFAULT_STAGING_ENDPOINT = "https://hub-ci.huggingface.co" +ENDPOINT = os.getenv("HF_ENDPOINT", _HF_DEFAULT_ENDPOINT).rstrip("/") +HUGGINGFACE_CO_URL_TEMPLATE = ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" + +if _staging_mode: + ENDPOINT = _HF_DEFAULT_STAGING_ENDPOINT + HUGGINGFACE_CO_URL_TEMPLATE = _HF_DEFAULT_STAGING_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}" + +HUGGINGFACE_HEADER_X_REPO_COMMIT = "X-Repo-Commit" +HUGGINGFACE_HEADER_X_LINKED_ETAG = "X-Linked-Etag" +HUGGINGFACE_HEADER_X_LINKED_SIZE = "X-Linked-Size" +HUGGINGFACE_HEADER_X_BILL_TO = "X-HF-Bill-To" + +INFERENCE_ENDPOINT = os.environ.get("HF_INFERENCE_ENDPOINT", "https://api-inference.huggingface.co") + +# See https://huggingface.co/docs/inference-endpoints/index +INFERENCE_ENDPOINTS_ENDPOINT = "https://api.endpoints.huggingface.cloud/v2" +INFERENCE_CATALOG_ENDPOINT = "https://endpoints.huggingface.co/api/catalog" + +# See https://api.endpoints.huggingface.cloud/#post-/v2/endpoint/-namespace- +INFERENCE_ENDPOINT_IMAGE_KEYS = [ + "custom", + "huggingface", + "huggingfaceNeuron", + "llamacpp", + "tei", + "tgi", + "tgiNeuron", +] + +# Proxy for third-party providers +INFERENCE_PROXY_TEMPLATE = "https://router.huggingface.co/{provider}" + +REPO_ID_SEPARATOR = "--" +# ^ this substring is not allowed in repo_ids on hf.co +# and is the canonical one we use for serialization of repo ids elsewhere. + + +REPO_TYPE_DATASET = "dataset" +REPO_TYPE_SPACE = "space" +REPO_TYPE_MODEL = "model" +REPO_TYPES = [None, REPO_TYPE_MODEL, REPO_TYPE_DATASET, REPO_TYPE_SPACE] +SPACES_SDK_TYPES = ["gradio", "streamlit", "docker", "static"] + +REPO_TYPES_URL_PREFIXES = { + REPO_TYPE_DATASET: "datasets/", + REPO_TYPE_SPACE: "spaces/", +} +REPO_TYPES_MAPPING = { + "datasets": REPO_TYPE_DATASET, + "spaces": REPO_TYPE_SPACE, + "models": REPO_TYPE_MODEL, +} + +DiscussionTypeFilter = Literal["all", "discussion", "pull_request"] +DISCUSSION_TYPES: Tuple[DiscussionTypeFilter, ...] = typing.get_args(DiscussionTypeFilter) +DiscussionStatusFilter = Literal["all", "open", "closed"] +DISCUSSION_STATUS: Tuple[DiscussionTypeFilter, ...] = typing.get_args(DiscussionStatusFilter) + +# Webhook subscription types +WEBHOOK_DOMAIN_T = Literal["repo", "discussions"] + +# default cache +default_home = os.path.join(os.path.expanduser("~"), ".cache") +HF_HOME = os.path.expandvars( + os.path.expanduser( + os.getenv( + "HF_HOME", + os.path.join(os.getenv("XDG_CACHE_HOME", default_home), "huggingface"), + ) + ) +) +hf_cache_home = HF_HOME # for backward compatibility. 
TODO: remove this in 1.0.0 + +default_cache_path = os.path.join(HF_HOME, "hub") +default_assets_cache_path = os.path.join(HF_HOME, "assets") + +# Legacy env variables +HUGGINGFACE_HUB_CACHE = os.getenv("HUGGINGFACE_HUB_CACHE", default_cache_path) +HUGGINGFACE_ASSETS_CACHE = os.getenv("HUGGINGFACE_ASSETS_CACHE", default_assets_cache_path) + +# New env variables +HF_HUB_CACHE = os.path.expandvars( + os.path.expanduser( + os.getenv( + "HF_HUB_CACHE", + HUGGINGFACE_HUB_CACHE, + ) + ) +) +HF_ASSETS_CACHE = os.path.expandvars( + os.path.expanduser( + os.getenv( + "HF_ASSETS_CACHE", + HUGGINGFACE_ASSETS_CACHE, + ) + ) +) + +HF_HUB_OFFLINE = _is_true(os.environ.get("HF_HUB_OFFLINE") or os.environ.get("TRANSFORMERS_OFFLINE")) + +# If set, log level will be set to DEBUG and all requests made to the Hub will be logged +# as curl commands for reproducibility. +HF_DEBUG = _is_true(os.environ.get("HF_DEBUG")) + +# Opt-out from telemetry requests +HF_HUB_DISABLE_TELEMETRY = ( + _is_true(os.environ.get("HF_HUB_DISABLE_TELEMETRY")) # HF-specific env variable + or _is_true(os.environ.get("DISABLE_TELEMETRY")) + or _is_true(os.environ.get("DO_NOT_TRACK")) # https://consoledonottrack.com/ +) + +HF_TOKEN_PATH = os.path.expandvars( + os.path.expanduser( + os.getenv( + "HF_TOKEN_PATH", + os.path.join(HF_HOME, "token"), + ) + ) +) +HF_STORED_TOKENS_PATH = os.path.join(os.path.dirname(HF_TOKEN_PATH), "stored_tokens") + +if _staging_mode: + # In staging mode, we use a different cache to ensure we don't mix up production and staging data or tokens + # In practice in `huggingface_hub` tests, we monkeypatch these values with temporary directories. The following + # lines are only used in third-party libraries tests (e.g. `transformers`, `diffusers`, etc.). + _staging_home = os.path.join(os.path.expanduser("~"), ".cache", "huggingface_staging") + HUGGINGFACE_HUB_CACHE = os.path.join(_staging_home, "hub") + HF_TOKEN_PATH = os.path.join(_staging_home, "token") + +# Here, `True` will disable progress bars globally without the possibility of enabling them +# programmatically. `False` will enable them without the possibility of disabling them. +# If the environment variable is not set (None), then the user is free to enable/disable +# them programmatically. +# TL;DR: env variable has priority over code +__HF_HUB_DISABLE_PROGRESS_BARS = os.environ.get("HF_HUB_DISABLE_PROGRESS_BARS") +HF_HUB_DISABLE_PROGRESS_BARS: Optional[bool] = ( + _is_true(__HF_HUB_DISABLE_PROGRESS_BARS) if __HF_HUB_DISABLE_PROGRESS_BARS is not None else None +) + +# Disable warning on machines that do not support symlinks (e.g. Windows non-developer) +HF_HUB_DISABLE_SYMLINKS_WARNING: bool = _is_true(os.environ.get("HF_HUB_DISABLE_SYMLINKS_WARNING")) + +# Disable warning when using experimental features +HF_HUB_DISABLE_EXPERIMENTAL_WARNING: bool = _is_true(os.environ.get("HF_HUB_DISABLE_EXPERIMENTAL_WARNING")) + +# Disable sending the cached token by default in all HTTP requests to the Hub +HF_HUB_DISABLE_IMPLICIT_TOKEN: bool = _is_true(os.environ.get("HF_HUB_DISABLE_IMPLICIT_TOKEN")) + +# Enable fast-download using external dependency "hf_transfer" +# See: +# - https://pypi.org/project/hf-transfer/ +# - https://github.com/huggingface/hf_transfer (private) +HF_HUB_ENABLE_HF_TRANSFER: bool = _is_true(os.environ.get("HF_HUB_ENABLE_HF_TRANSFER")) + + +# UNUSED +# We don't use symlinks in local dir anymore.
+HF_HUB_LOCAL_DIR_AUTO_SYMLINK_THRESHOLD: int = ( + _as_int(os.environ.get("HF_HUB_LOCAL_DIR_AUTO_SYMLINK_THRESHOLD")) or 5 * 1024 * 1024 +) + +# Used to override the etag timeout on a system level +HF_HUB_ETAG_TIMEOUT: int = _as_int(os.environ.get("HF_HUB_ETAG_TIMEOUT")) or DEFAULT_ETAG_TIMEOUT + +# Used to override the get request timeout on a system level +HF_HUB_DOWNLOAD_TIMEOUT: int = _as_int(os.environ.get("HF_HUB_DOWNLOAD_TIMEOUT")) or DEFAULT_DOWNLOAD_TIMEOUT + +# Allows to add information about the requester in the user-agent (eg. partner name) +HF_HUB_USER_AGENT_ORIGIN: Optional[str] = os.environ.get("HF_HUB_USER_AGENT_ORIGIN") + +# List frameworks that are handled by the InferenceAPI service. Useful to scan endpoints and check which models are +# deployed and running. Since 95% of the models are using the top 4 frameworks listed below, we scan only those by +# default. We still keep the full list of supported frameworks in case we want to scan all of them. +MAIN_INFERENCE_API_FRAMEWORKS = [ + "diffusers", + "sentence-transformers", + "text-generation-inference", + "transformers", +] + +ALL_INFERENCE_API_FRAMEWORKS = MAIN_INFERENCE_API_FRAMEWORKS + [ + "adapter-transformers", + "allennlp", + "asteroid", + "bertopic", + "doctr", + "espnet", + "fairseq", + "fastai", + "fasttext", + "flair", + "k2", + "keras", + "mindspore", + "nemo", + "open_clip", + "paddlenlp", + "peft", + "pyannote-audio", + "sklearn", + "spacy", + "span-marker", + "speechbrain", + "stanza", + "timm", +] + +# If OAuth didn't work after 2 redirects, there's likely a third-party cookie issue in the Space iframe view. +# In this case, we redirect the user to the non-iframe view. +OAUTH_MAX_REDIRECTS = 2 + +# OAuth-related environment variables injected by the Space +OAUTH_CLIENT_ID = os.environ.get("OAUTH_CLIENT_ID") +OAUTH_CLIENT_SECRET = os.environ.get("OAUTH_CLIENT_SECRET") +OAUTH_SCOPES = os.environ.get("OAUTH_SCOPES") +OPENID_PROVIDER_URL = os.environ.get("OPENID_PROVIDER_URL") + +# Xet constants +HUGGINGFACE_HEADER_X_XET_ENDPOINT = "X-Xet-Cas-Url" +HUGGINGFACE_HEADER_X_XET_ACCESS_TOKEN = "X-Xet-Access-Token" +HUGGINGFACE_HEADER_X_XET_EXPIRATION = "X-Xet-Token-Expiration" +HUGGINGFACE_HEADER_X_XET_HASH = "X-Xet-Hash" +HUGGINGFACE_HEADER_X_XET_REFRESH_ROUTE = "X-Xet-Refresh-Route" +HUGGINGFACE_HEADER_LINK_XET_AUTH_KEY = "xet-auth" + +default_xet_cache_path = os.path.join(HF_HOME, "xet") +HF_XET_CACHE = os.getenv("HF_XET_CACHE", default_xet_cache_path) +HF_HUB_DISABLE_XET: bool = _is_true(os.environ.get("HF_HUB_DISABLE_XET")) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/dataclasses.py b/venv/lib/python3.13/site-packages/huggingface_hub/dataclasses.py new file mode 100644 index 0000000000000000000000000000000000000000..636a0ac64b327448e6f8f56b10add54528071f29 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/dataclasses.py @@ -0,0 +1,484 @@ +import inspect +from dataclasses import _MISSING_TYPE, MISSING, Field, field, fields +from functools import wraps +from typing import ( + Any, + Callable, + Dict, + ForwardRef, + List, + Literal, + Optional, + Tuple, + Type, + TypeVar, + Union, + get_args, + get_origin, + overload, +) + +from .errors import ( + StrictDataclassClassValidationError, + StrictDataclassDefinitionError, + StrictDataclassFieldValidationError, +) + + +Validator_T = Callable[[Any], None] +T = TypeVar("T") + + +# The overload decorator helps type checkers understand the different return types +@overload +def strict(cls: Type[T]) -> Type[T]: ... 
+ + +@overload +def strict(*, accept_kwargs: bool = False) -> Callable[[Type[T]], Type[T]]: ... + + +def strict( + cls: Optional[Type[T]] = None, *, accept_kwargs: bool = False +) -> Union[Type[T], Callable[[Type[T]], Type[T]]]: + """ + Decorator to add strict validation to a dataclass. + + This decorator must be used on top of `@dataclass` to ensure IDEs and static typing tools + recognize the class as a dataclass. + + Can be used with or without arguments: + - `@strict` + - `@strict(accept_kwargs=True)` + + Args: + cls: + The class to convert to a strict dataclass. + accept_kwargs (`bool`, *optional*): + If True, allows arbitrary keyword arguments in `__init__`. Defaults to False. + + Returns: + The enhanced dataclass with strict validation on field assignment. + + Example: + ```py + >>> from dataclasses import dataclass + >>> from huggingface_hub.dataclasses import as_validated_field, strict, validated_field + + >>> @as_validated_field + >>> def positive_int(value: int): + ... if not value >= 0: + ... raise ValueError(f"Value must be positive, got {value}") + + >>> @strict(accept_kwargs=True) + ... @dataclass + ... class User: + ... name: str + ... age: int = positive_int(default=10) + + # Initialize + >>> User(name="John") + User(name='John', age=10) + + # Extra kwargs are accepted + >>> User(name="John", age=30, lastname="Doe") + User(name='John', age=30, *lastname='Doe') + + # Invalid type => raises + >>> User(name="John", age="30") + huggingface_hub.errors.StrictDataclassFieldValidationError: Validation error for field 'age': + TypeError: Field 'age' expected int, got str (value: '30') + + # Invalid value => raises + >>> User(name="John", age=-1) + huggingface_hub.errors.StrictDataclassFieldValidationError: Validation error for field 'age': + ValueError: Value must be positive, got -1 + ``` + """ + + def wrap(cls: Type[T]) -> Type[T]: + if not hasattr(cls, "__dataclass_fields__"): + raise StrictDataclassDefinitionError( + f"Class '{cls.__name__}' must be a dataclass before applying @strict." + ) + + # List and store validators + field_validators: Dict[str, List[Validator_T]] = {} + for f in fields(cls): # type: ignore [arg-type] + validators = [] + validators.append(_create_type_validator(f)) + custom_validator = f.metadata.get("validator") + if custom_validator is not None: + if not isinstance(custom_validator, list): + custom_validator = [custom_validator] + for validator in custom_validator: + if not _is_validator(validator): + raise StrictDataclassDefinitionError( + f"Invalid validator for field '{f.name}': {validator}. Must be a callable taking a single argument." 
+ ) + validators.extend(custom_validator) + field_validators[f.name] = validators + cls.__validators__ = field_validators # type: ignore + + # Override __setattr__ to validate fields on assignment + original_setattr = cls.__setattr__ + + def __strict_setattr__(self: Any, name: str, value: Any) -> None: + """Custom __setattr__ method for strict dataclasses.""" + # Run all validators + for validator in self.__validators__.get(name, []): + try: + validator(value) + except (ValueError, TypeError) as e: + raise StrictDataclassFieldValidationError(field=name, cause=e) from e + + # If validation passed, set the attribute + original_setattr(self, name, value) + + cls.__setattr__ = __strict_setattr__ # type: ignore[method-assign] + + if accept_kwargs: + # (optional) Override __init__ to accept arbitrary keyword arguments + original_init = cls.__init__ + + @wraps(original_init) + def __init__(self, **kwargs: Any) -> None: + # Extract only the fields that are part of the dataclass + dataclass_fields = {f.name for f in fields(cls)} # type: ignore [arg-type] + standard_kwargs = {k: v for k, v in kwargs.items() if k in dataclass_fields} + + # Call the original __init__ with standard fields + original_init(self, **standard_kwargs) + + # Add any additional kwargs as attributes + for name, value in kwargs.items(): + if name not in dataclass_fields: + self.__setattr__(name, value) + + cls.__init__ = __init__ # type: ignore[method-assign] + + # (optional) Override __repr__ to include additional kwargs + original_repr = cls.__repr__ + + @wraps(original_repr) + def __repr__(self) -> str: + # Call the original __repr__ to get the standard fields + standard_repr = original_repr(self) + + # Get additional kwargs + additional_kwargs = [ + # add a '*' in front of additional kwargs to let the user know they are not part of the dataclass + f"*{k}={v!r}" + for k, v in self.__dict__.items() + if k not in cls.__dataclass_fields__ # type: ignore [attr-defined] + ] + additional_repr = ", ".join(additional_kwargs) + + # Combine both representations + return f"{standard_repr[:-1]}, {additional_repr})" if additional_kwargs else standard_repr + + cls.__repr__ = __repr__ # type: ignore [method-assign] + + # List all public methods starting with `validate_` => class validators. + class_validators = [] + + for name in dir(cls): + if not name.startswith("validate_"): + continue + method = getattr(cls, name) + if not callable(method): + continue + if len(inspect.signature(method).parameters) != 1: + raise StrictDataclassDefinitionError( + f"Class '{cls.__name__}' has a class validator '{name}' that takes more than one argument." + " Class validators must take only 'self' as an argument. Methods starting with 'validate_'" + " are considered to be class validators." 
+ ) + class_validators.append(method) + + cls.__class_validators__ = class_validators # type: ignore [attr-defined] + + # Add `validate` method to the class, but first check if it already exists + def validate(self: T) -> None: + """Run class validators on the instance.""" + for validator in cls.__class_validators__: # type: ignore [attr-defined] + try: + validator(self) + except (ValueError, TypeError) as e: + raise StrictDataclassClassValidationError(validator=validator.__name__, cause=e) from e + + # Hack to be able to raise if `.validate()` already exists except if it was created by this decorator on a parent class + # (in which case we just override it) + validate.__is_defined_by_strict_decorator__ = True # type: ignore [attr-defined] + + if hasattr(cls, "validate"): + if not getattr(cls.validate, "__is_defined_by_strict_decorator__", False): # type: ignore [attr-defined] + raise StrictDataclassDefinitionError( + f"Class '{cls.__name__}' already implements a method called 'validate'." + " This method name is reserved when using the @strict decorator on a dataclass." + " If you want to keep your own method, please rename it." + ) + + cls.validate = validate # type: ignore + + # Run class validators after initialization + initial_init = cls.__init__ + + @wraps(initial_init) + def init_with_validate(self, *args, **kwargs) -> None: + """Run class validators after initialization.""" + initial_init(self, *args, **kwargs) # type: ignore [call-arg] + cls.validate(self) # type: ignore [attr-defined] + + setattr(cls, "__init__", init_with_validate) + + return cls + + # Return wrapped class or the decorator itself + return wrap(cls) if cls is not None else wrap + + +def validated_field( + validator: Union[List[Validator_T], Validator_T], + default: Union[Any, _MISSING_TYPE] = MISSING, + default_factory: Union[Callable[[], Any], _MISSING_TYPE] = MISSING, + init: bool = True, + repr: bool = True, + hash: Optional[bool] = None, + compare: bool = True, + metadata: Optional[Dict] = None, + **kwargs: Any, +) -> Any: + """ + Create a dataclass field with a custom validator. + + Useful to apply several checks to a field. If only applying one rule, check out the [`as_validated_field`] decorator. + + Args: + validator (`Callable` or `List[Callable]`): + A method that takes a value as input and raises ValueError/TypeError if the value is invalid. + Can be a list of validators to apply multiple checks. + **kwargs: + Additional arguments to pass to `dataclasses.field()`. + + Returns: + A field with the validator attached in metadata + """ + if not isinstance(validator, list): + validator = [validator] + if metadata is None: + metadata = {} + metadata["validator"] = validator + return field( # type: ignore + default=default, # type: ignore [arg-type] + default_factory=default_factory, # type: ignore [arg-type] + init=init, + repr=repr, + hash=hash, + compare=compare, + metadata=metadata, + **kwargs, + ) + + +def as_validated_field(validator: Validator_T): + """ + Decorates a validator function as a [`validated_field`] (i.e. a dataclass field with a custom validator). + + Args: + validator (`Callable`): + A method that takes a value as input and raises ValueError/TypeError if the value is invalid. 
+ """ + + def _inner( + default: Union[Any, _MISSING_TYPE] = MISSING, + default_factory: Union[Callable[[], Any], _MISSING_TYPE] = MISSING, + init: bool = True, + repr: bool = True, + hash: Optional[bool] = None, + compare: bool = True, + metadata: Optional[Dict] = None, + **kwargs: Any, + ): + return validated_field( + validator, + default=default, + default_factory=default_factory, + init=init, + repr=repr, + hash=hash, + compare=compare, + metadata=metadata, + **kwargs, + ) + + return _inner + + +def type_validator(name: str, value: Any, expected_type: Any) -> None: + """Validate that 'value' matches 'expected_type'.""" + origin = get_origin(expected_type) + args = get_args(expected_type) + + if expected_type is Any: + return + elif validator := _BASIC_TYPE_VALIDATORS.get(origin): + validator(name, value, args) + elif isinstance(expected_type, type): # simple types + _validate_simple_type(name, value, expected_type) + elif isinstance(expected_type, ForwardRef) or isinstance(expected_type, str): + return + else: + raise TypeError(f"Unsupported type for field '{name}': {expected_type}") + + +def _validate_union(name: str, value: Any, args: Tuple[Any, ...]) -> None: + """Validate that value matches one of the types in a Union.""" + errors = [] + for t in args: + try: + type_validator(name, value, t) + return # Valid if any type matches + except TypeError as e: + errors.append(str(e)) + + raise TypeError( + f"Field '{name}' with value {repr(value)} doesn't match any type in {args}. Errors: {'; '.join(errors)}" + ) + + +def _validate_literal(name: str, value: Any, args: Tuple[Any, ...]) -> None: + """Validate Literal type.""" + if value not in args: + raise TypeError(f"Field '{name}' expected one of {args}, got {value}") + + +def _validate_list(name: str, value: Any, args: Tuple[Any, ...]) -> None: + """Validate List[T] type.""" + if not isinstance(value, list): + raise TypeError(f"Field '{name}' expected a list, got {type(value).__name__}") + + # Validate each item in the list + item_type = args[0] + for i, item in enumerate(value): + try: + type_validator(f"{name}[{i}]", item, item_type) + except TypeError as e: + raise TypeError(f"Invalid item at index {i} in list '{name}'") from e + + +def _validate_dict(name: str, value: Any, args: Tuple[Any, ...]) -> None: + """Validate Dict[K, V] type.""" + if not isinstance(value, dict): + raise TypeError(f"Field '{name}' expected a dict, got {type(value).__name__}") + + # Validate keys and values + key_type, value_type = args + for k, v in value.items(): + try: + type_validator(f"{name}.key", k, key_type) + type_validator(f"{name}[{k!r}]", v, value_type) + except TypeError as e: + raise TypeError(f"Invalid key or value in dict '{name}'") from e + + +def _validate_tuple(name: str, value: Any, args: Tuple[Any, ...]) -> None: + """Validate Tuple type.""" + if not isinstance(value, tuple): + raise TypeError(f"Field '{name}' expected a tuple, got {type(value).__name__}") + + # Handle variable-length tuples: Tuple[T, ...] + if len(args) == 2 and args[1] is Ellipsis: + for i, item in enumerate(value): + try: + type_validator(f"{name}[{i}]", item, args[0]) + except TypeError as e: + raise TypeError(f"Invalid item at index {i} in tuple '{name}'") from e + # Handle fixed-length tuples: Tuple[T1, T2, ...] 
+ elif len(args) != len(value): + raise TypeError(f"Field '{name}' expected a tuple of length {len(args)}, got {len(value)}") + else: + for i, (item, expected) in enumerate(zip(value, args)): + try: + type_validator(f"{name}[{i}]", item, expected) + except TypeError as e: + raise TypeError(f"Invalid item at index {i} in tuple '{name}'") from e + + +def _validate_set(name: str, value: Any, args: Tuple[Any, ...]) -> None: + """Validate Set[T] type.""" + if not isinstance(value, set): + raise TypeError(f"Field '{name}' expected a set, got {type(value).__name__}") + + # Validate each item in the set + item_type = args[0] + for i, item in enumerate(value): + try: + type_validator(f"{name} item", item, item_type) + except TypeError as e: + raise TypeError(f"Invalid item in set '{name}'") from e + + +def _validate_simple_type(name: str, value: Any, expected_type: type) -> None: + """Validate simple type (int, str, etc.).""" + if not isinstance(value, expected_type): + raise TypeError( + f"Field '{name}' expected {expected_type.__name__}, got {type(value).__name__} (value: {repr(value)})" + ) + + +def _create_type_validator(field: Field) -> Validator_T: + """Create a type validator function for a field.""" + # Hacky: we cannot use a lambda here because of reference issues + + def validator(value: Any) -> None: + type_validator(field.name, value, field.type) + + return validator + + +def _is_validator(validator: Any) -> bool: + """Check if a function is a validator. + + A validator is a Callable that can be called with a single positional argument. + The validator can have more arguments with default values. + + Basically, returns True if `validator(value)` is possible. + """ + if not callable(validator): + return False + + signature = inspect.signature(validator) + parameters = list(signature.parameters.values()) + if len(parameters) == 0: + return False + if parameters[0].kind not in ( + inspect.Parameter.POSITIONAL_OR_KEYWORD, + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.VAR_POSITIONAL, + ): + return False + for parameter in parameters[1:]: + if parameter.default == inspect.Parameter.empty: + return False + return True + + +_BASIC_TYPE_VALIDATORS = { + Union: _validate_union, + Literal: _validate_literal, + list: _validate_list, + dict: _validate_dict, + tuple: _validate_tuple, + set: _validate_set, +} + + +__all__ = [ + "strict", + "validated_field", + "Validator_T", + "StrictDataclassClassValidationError", + "StrictDataclassDefinitionError", + "StrictDataclassFieldValidationError", +] diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/errors.py b/venv/lib/python3.13/site-packages/huggingface_hub/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..a0f7ed80e35a7cbe1dcc0f21dfa0354e467676f3 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/errors.py @@ -0,0 +1,377 @@ +"""Contains all custom errors.""" + +from pathlib import Path +from typing import Optional, Union + +from requests import HTTPError, Response + + +# CACHE ERRORS + + +class CacheNotFound(Exception): + """Exception thrown when the Huggingface cache is not found.""" + + cache_dir: Union[str, Path] + + def __init__(self, msg: str, cache_dir: Union[str, Path], *args, **kwargs): + super().__init__(msg, *args, **kwargs) + self.cache_dir = cache_dir + + +class CorruptedCacheException(Exception): + """Exception for any unexpected structure in the Huggingface cache-system.""" + + +# HEADERS ERRORS + + +class LocalTokenNotFoundError(EnvironmentError): + """Raised if local 
token is required but not found.""" + + +# HTTP ERRORS + + +class OfflineModeIsEnabled(ConnectionError): + """Raised when a request is made but `HF_HUB_OFFLINE=1` is set as an environment variable.""" + + +class HfHubHTTPError(HTTPError): + """ + HTTPError to inherit from for any custom HTTP error raised in the HF Hub. + + Any HTTPError is converted at least into a `HfHubHTTPError`. If some information is + sent back by the server, it will be added to the error message. + + Added details: + - Request id from the "X-Request-Id" header if it exists. If not, fall back to the "X-Amzn-Trace-Id" header if it exists. + - Server error message from the header "X-Error-Message". + - Server error message if we can find one in the response body. + + Example: + ```py + import requests + from huggingface_hub.utils import get_session, hf_raise_for_status, HfHubHTTPError + + response = get_session().post(...) + try: + hf_raise_for_status(response) + except HfHubHTTPError as e: + print(str(e)) # formatted message + e.request_id, e.server_message # details returned by server + + # Complete the error message with additional information once it's raised + e.append_to_message("\n`create_commit` expects the repository to exist.") + raise + ``` + """ + + def __init__(self, message: str, response: Optional[Response] = None, *, server_message: Optional[str] = None): + self.request_id = ( + response.headers.get("x-request-id") or response.headers.get("X-Amzn-Trace-Id") + if response is not None + else None + ) + self.server_message = server_message + + super().__init__( + message, + response=response, # type: ignore [arg-type] + request=response.request if response is not None else None, # type: ignore [arg-type] + ) + + def append_to_message(self, additional_message: str) -> None: + """Append additional information to the `HfHubHTTPError` initial message.""" + self.args = (self.args[0] + additional_message,) + self.args[1:] + + +# INFERENCE CLIENT ERRORS + + +class InferenceTimeoutError(HTTPError, TimeoutError): + """Error raised when a model is unavailable or the request times out.""" + + +# INFERENCE ENDPOINT ERRORS + + +class InferenceEndpointError(Exception): + """Generic exception when dealing with Inference Endpoints.""" + + +class InferenceEndpointTimeoutError(InferenceEndpointError, TimeoutError): + """Exception for timeouts while waiting for Inference Endpoint.""" + + +# SAFETENSORS ERRORS + + +class SafetensorsParsingError(Exception): + """Raised when failing to parse a safetensors file metadata. + + This can be the case if the file is not a safetensors file or does not respect the specification. + """ + + +class NotASafetensorsRepoError(Exception): + """Raised when a repo is not a Safetensors repo, i.e. it doesn't have either a `model.safetensors` or a + `model.safetensors.index.json` file. + """ + + +# TEXT GENERATION ERRORS + + +class TextGenerationError(HTTPError): + """Generic error raised if text-generation went wrong.""" + + +# Text Generation Inference Errors +class ValidationError(TextGenerationError): + """Server-side validation error.""" + + +class GenerationError(TextGenerationError): + pass + + +class OverloadedError(TextGenerationError): + pass + + +class IncompleteGenerationError(TextGenerationError): + pass + + +class UnknownError(TextGenerationError): + pass + + +# VALIDATION ERRORS + + +class HFValidationError(ValueError): + """Generic exception thrown by `huggingface_hub` validators. + + Inherits from [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError).
+ """ + + +# FILE METADATA ERRORS + + +class FileMetadataError(OSError): + """Error triggered when the metadata of a file on the Hub cannot be retrieved (missing ETag or commit_hash). + + Inherits from `OSError` for backward compatibility. + """ + + +# REPOSITORY ERRORS + + +class RepositoryNotFoundError(HfHubHTTPError): + """ + Raised when trying to access a hf.co URL with an invalid repository name, or + with a private repo name the user does not have access to. + + Example: + + ```py + >>> from huggingface_hub import model_info + >>> model_info("") + (...) + huggingface_hub.utils._errors.RepositoryNotFoundError: 401 Client Error. (Request ID: PvMw_VjBMjVdMz53WKIzP) + + Repository Not Found for url: https://huggingface.co/api/models/%3Cnon_existent_repository%3E. + Please make sure you specified the correct `repo_id` and `repo_type`. + If the repo is private, make sure you are authenticated. + Invalid username or password. + ``` + """ + + +class GatedRepoError(RepositoryNotFoundError): + """ + Raised when trying to access a gated repository for which the user is not on the + authorized list. + + Note: derives from `RepositoryNotFoundError` to ensure backward compatibility. + + Example: + + ```py + >>> from huggingface_hub import model_info + >>> model_info("") + (...) + huggingface_hub.utils._errors.GatedRepoError: 403 Client Error. (Request ID: ViT1Bf7O_026LGSQuVqfa) + + Cannot access gated repo for url https://huggingface.co/api/models/ardent-figment/gated-model. + Access to model ardent-figment/gated-model is restricted and you are not in the authorized list. + Visit https://huggingface.co/ardent-figment/gated-model to ask for access. + ``` + """ + + +class DisabledRepoError(HfHubHTTPError): + """ + Raised when trying to access a repository that has been disabled by its author. + + Example: + + ```py + >>> from huggingface_hub import dataset_info + >>> dataset_info("laion/laion-art") + (...) + huggingface_hub.utils._errors.DisabledRepoError: 403 Client Error. (Request ID: Root=1-659fc3fa-3031673e0f92c71a2260dbe2;bc6f4dfb-b30a-4862-af0a-5cfe827610d8) + + Cannot access repository for url https://huggingface.co/api/datasets/laion/laion-art. + Access to this resource is disabled. + ``` + """ + + +# REVISION ERROR + + +class RevisionNotFoundError(HfHubHTTPError): + """ + Raised when trying to access a hf.co URL with a valid repository but an invalid + revision. + + Example: + + ```py + >>> from huggingface_hub import hf_hub_download + >>> hf_hub_download('bert-base-cased', 'config.json', revision='') + (...) + huggingface_hub.utils._errors.RevisionNotFoundError: 404 Client Error. (Request ID: Mwhe_c3Kt650GcdKEFomX) + + Revision Not Found for url: https://huggingface.co/bert-base-cased/resolve/%3Cnon-existent-revision%3E/config.json. + ``` + """ + + +# ENTRY ERRORS +class EntryNotFoundError(HfHubHTTPError): + """ + Raised when trying to access a hf.co URL with a valid repository and revision + but an invalid filename. + + Example: + + ```py + >>> from huggingface_hub import hf_hub_download + >>> hf_hub_download('bert-base-cased', '') + (...) + huggingface_hub.utils._errors.EntryNotFoundError: 404 Client Error. (Request ID: 53pNl6M0MxsnG5Sw8JA6x) + + Entry Not Found for url: https://huggingface.co/bert-base-cased/resolve/main/%3Cnon-existent-file%3E. + ``` + """ + + +class LocalEntryNotFoundError(EntryNotFoundError, FileNotFoundError, ValueError): + """ + Raised when trying to access a file or snapshot that is not on the disk when network is + disabled or unavailable (connection issue). 
The entry may exist on the Hub. + + Note: `ValueError` type is to ensure backward compatibility. + Note: `LocalEntryNotFoundError` derives from `HTTPError` because of `EntryNotFoundError` + even when it is not a network issue. + + Example: + + ```py + >>> from huggingface_hub import hf_hub_download + >>> hf_hub_download('bert-base-cased', '', local_files_only=True) + (...) + huggingface_hub.utils._errors.LocalEntryNotFoundError: Cannot find the requested files in the disk cache and outgoing traffic has been disabled. To enable hf.co look-ups and downloads online, set 'local_files_only' to False. + ``` + """ + + def __init__(self, message: str): + super().__init__(message, response=None) + + +# REQUEST ERROR +class BadRequestError(HfHubHTTPError, ValueError): + """ + Raised by `hf_raise_for_status` when the server returns a HTTP 400 error. + + Example: + + ```py + >>> resp = requests.post("hf.co/api/check", ...) + >>> hf_raise_for_status(resp, endpoint_name="check") + huggingface_hub.utils._errors.BadRequestError: Bad request for check endpoint: {details} (Request ID: XXX) + ``` + """ + + +# DDUF file format ERROR + + +class DDUFError(Exception): + """Base exception for errors related to the DDUF format.""" + + +class DDUFCorruptedFileError(DDUFError): + """Exception thrown when the DDUF file is corrupted.""" + + +class DDUFExportError(DDUFError): + """Base exception for errors during DDUF export.""" + + +class DDUFInvalidEntryNameError(DDUFExportError): + """Exception thrown when the entry name is invalid.""" + + +# STRICT DATACLASSES ERRORS + + +class StrictDataclassError(Exception): + """Base exception for strict dataclasses.""" + + +class StrictDataclassDefinitionError(StrictDataclassError): + """Exception thrown when a strict dataclass is defined incorrectly.""" + + +class StrictDataclassFieldValidationError(StrictDataclassError): + """Exception thrown when a strict dataclass fails validation for a given field.""" + + def __init__(self, field: str, cause: Exception): + error_message = f"Validation error for field '{field}':" + error_message += f"\n {cause.__class__.__name__}: {cause}" + super().__init__(error_message) + + +class StrictDataclassClassValidationError(StrictDataclassError): + """Exception thrown when a strict dataclass fails validation on a class validator.""" + + def __init__(self, validator: str, cause: Exception): + error_message = f"Class validation error for validator '{validator}':" + error_message += f"\n {cause.__class__.__name__}: {cause}" + super().__init__(error_message) + + +# XET ERRORS + + +class XetError(Exception): + """Base exception for errors related to Xet Storage.""" + + +class XetAuthorizationError(XetError): + """Exception thrown when the user does not have the right authorization to use Xet Storage.""" + + +class XetRefreshTokenError(XetError): + """Exception thrown when the refresh token is invalid.""" + + +class XetDownloadError(Exception): + """Exception thrown when the download from Xet Storage fails.""" diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/fastai_utils.py b/venv/lib/python3.13/site-packages/huggingface_hub/fastai_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fc3b42323a251140aac813da24493918be267472 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/fastai_utils.py @@ -0,0 +1,415 @@ +import json +import os +from pathlib import Path +from pickle import DEFAULT_PROTOCOL, PicklingError +from typing import Any, Dict, List, Optional, Union + +from packaging import version + 
+from huggingface_hub import constants, snapshot_download +from huggingface_hub.hf_api import HfApi +from huggingface_hub.utils import ( + SoftTemporaryDirectory, + get_fastai_version, + get_fastcore_version, + get_python_version, +) + +from .utils import logging, validate_hf_hub_args +from .utils._runtime import _PY_VERSION # noqa: F401 # for backward compatibility... + + +logger = logging.get_logger(__name__) + + +def _check_fastai_fastcore_versions( + fastai_min_version: str = "2.4", + fastcore_min_version: str = "1.3.27", +): + """ + Checks that the installed fastai and fastcore versions are compatible for pickle serialization. + + Args: + fastai_min_version (`str`, *optional*): + The minimum fastai version supported. + fastcore_min_version (`str`, *optional*): + The minimum fastcore version supported. + + > [!TIP] + > Raises the following error: + > + > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError) + > if the fastai or fastcore libraries are not available or are of an invalid version. + """ + + if (get_fastcore_version() or get_fastai_version()) == "N/A": + raise ImportError( + f"fastai>={fastai_min_version} and fastcore>={fastcore_min_version} are" + f" required. Currently using fastai=={get_fastai_version()} and" + f" fastcore=={get_fastcore_version()}." + ) + + current_fastai_version = version.Version(get_fastai_version()) + current_fastcore_version = version.Version(get_fastcore_version()) + + if current_fastai_version < version.Version(fastai_min_version): + raise ImportError( + "`push_to_hub_fastai` and `from_pretrained_fastai` require a" + f" fastai>={fastai_min_version} version, but you are using fastai version" + f" {get_fastai_version()} which is incompatible. Upgrade with `pip install" + " fastai==2.5.6`." + ) + + if current_fastcore_version < version.Version(fastcore_min_version): + raise ImportError( + "`push_to_hub_fastai` and `from_pretrained_fastai` require a" + f" fastcore>={fastcore_min_version} version, but you are using fastcore" + f" version {get_fastcore_version()} which is incompatible. Upgrade with" + " `pip install fastcore==1.3.27`." + ) + + +def _check_fastai_fastcore_pyproject_versions( + storage_folder: str, + fastai_min_version: str = "2.4", + fastcore_min_version: str = "1.3.27", +): + """ + Checks that the `pyproject.toml` file in the directory `storage_folder` has fastai and fastcore versions + that are compatible with `from_pretrained_fastai` and `push_to_hub_fastai`. If `pyproject.toml` does not exist + or does not contain versions for fastai and fastcore, then it logs a warning. + + Args: + storage_folder (`str`): + Folder to look for the `pyproject.toml` file. + fastai_min_version (`str`, *optional*): + The minimum fastai version supported. + fastcore_min_version (`str`, *optional*): + The minimum fastcore version supported. + + > [!TIP] + > Raises the following errors: + > + > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError) + > if the `toml` module is not installed. + > - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError) + > if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore. + """ + + try: + import toml + except ModuleNotFoundError: + raise ImportError( + "`push_to_hub_fastai` and `from_pretrained_fastai` require the toml module." + " Install it with `pip install toml`." + ) + + # Checks that a `pyproject.toml`, with `build-system` and `requires` sections, exists in the repository. 
If so, get a list of required packages.
+    if not os.path.isfile(f"{storage_folder}/pyproject.toml"):
+        logger.warning(
+            "There is no `pyproject.toml` in the repository that contains the fastai"
+            " `Learner`. The `pyproject.toml` would allow us to verify that your fastai"
+            " and fastcore versions are compatible with those of the model you want to"
+            " load."
+        )
+        return
+    pyproject_toml = toml.load(f"{storage_folder}/pyproject.toml")
+
+    if "build-system" not in pyproject_toml.keys():
+        logger.warning(
+            "There is no `build-system` section in the pyproject.toml of the repository"
+            " that contains the fastai `Learner`. The `build-system` would allow us to"
+            " verify that your fastai and fastcore versions are compatible with those"
+            " of the model you want to load."
+        )
+        return
+    build_system_toml = pyproject_toml["build-system"]
+
+    if "requires" not in build_system_toml.keys():
+        logger.warning(
+            "There is no `requires` section in the pyproject.toml of the repository"
+            " that contains the fastai `Learner`. The `requires` would allow us to"
+            " verify that your fastai and fastcore versions are compatible with those"
+            " of the model you want to load."
+        )
+        return
+    package_versions = build_system_toml["requires"]
+
+    # Extracts the fastai and fastcore versions from `pyproject.toml` if available.
+    # If a package is listed without a version (e.g. "fastai" instead of "fastai=2.4"), the version is treated as
+    # unconstrained and the compatibility check is skipped for that package.
+    fastai_packages = [pck for pck in package_versions if pck.startswith("fastai")]
+    if len(fastai_packages) == 0:
+        logger.warning("The repository does not have a fastai version specified in the `pyproject.toml`.")
+    # fastai_version is an empty string if not specified
+    else:
+        fastai_version = str(fastai_packages[0]).partition("=")[2]
+        if fastai_version != "" and version.Version(fastai_version) < version.Version(fastai_min_version):
+            raise ImportError(
+                "`from_pretrained_fastai` requires"
+                f" fastai>={fastai_min_version} version but the model to load uses"
+                f" {fastai_version} which is incompatible."
+            )
+
+    fastcore_packages = [pck for pck in package_versions if pck.startswith("fastcore")]
+    if len(fastcore_packages) == 0:
+        logger.warning("The repository does not have a fastcore version specified in the `pyproject.toml`.")
+    # fastcore_version is an empty string if not specified
+    else:
+        fastcore_version = str(fastcore_packages[0]).partition("=")[2]
+        if fastcore_version != "" and version.Version(fastcore_version) < version.Version(fastcore_min_version):
+            raise ImportError(
+                "`from_pretrained_fastai` requires"
+                f" fastcore>={fastcore_min_version} version, but you are using fastcore"
+                f" version {fastcore_version} which is incompatible."
+            )
+
+
+README_TEMPLATE = """---
+tags:
+- fastai
+---
+
+# Amazing!
+
+🥳 Congratulations on hosting your fastai model on the Hugging Face Hub!
+
+# Some next steps
+1. Fill out this model card with more information (see the template below and the [documentation here](https://huggingface.co/docs/hub/model-repos))!
+
+2. Create a demo in Gradio or Streamlit using 🤗 Spaces ([documentation here](https://huggingface.co/docs/hub/spaces)).
+
+3. Join the fastai community on the [Fastai Discord](https://discord.com/invite/YKrxeNn)!
+
+Greetings fellow fastlearner 🤝! Don't forget to delete this content from your model card.
+ + +--- + + +# Model card + +## Model description +More information needed + +## Intended uses & limitations +More information needed + +## Training and evaluation data +More information needed +""" + +PYPROJECT_TEMPLATE = f"""[build-system] +requires = ["setuptools>=40.8.0", "wheel", "python={get_python_version()}", "fastai={get_fastai_version()}", "fastcore={get_fastcore_version()}"] +build-backend = "setuptools.build_meta:__legacy__" +""" + + +def _create_model_card(repo_dir: Path): + """ + Creates a model card for the repository. + + Args: + repo_dir (`Path`): + Directory where model card is created. + """ + readme_path = repo_dir / "README.md" + + if not readme_path.exists(): + with readme_path.open("w", encoding="utf-8") as f: + f.write(README_TEMPLATE) + + +def _create_model_pyproject(repo_dir: Path): + """ + Creates a `pyproject.toml` for the repository. + + Args: + repo_dir (`Path`): + Directory where `pyproject.toml` is created. + """ + pyproject_path = repo_dir / "pyproject.toml" + + if not pyproject_path.exists(): + with pyproject_path.open("w", encoding="utf-8") as f: + f.write(PYPROJECT_TEMPLATE) + + +def _save_pretrained_fastai( + learner, + save_directory: Union[str, Path], + config: Optional[Dict[str, Any]] = None, +): + """ + Saves a fastai learner to `save_directory` in pickle format using the default pickle protocol for the version of python used. + + Args: + learner (`Learner`): + The `fastai.Learner` you'd like to save. + save_directory (`str` or `Path`): + Specific directory in which you want to save the fastai learner. + config (`dict`, *optional*): + Configuration object. Will be uploaded as a .json file. Example: 'https://huggingface.co/espejelomar/fastai-pet-breeds-classification/blob/main/config.json'. + + > [!TIP] + > Raises the following error: + > + > - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError) + > if the config file provided is not a dictionary. + """ + _check_fastai_fastcore_versions() + + os.makedirs(save_directory, exist_ok=True) + + # if the user provides config then we update it with the fastai and fastcore versions in CONFIG_TEMPLATE. + if config is not None: + if not isinstance(config, dict): + raise RuntimeError(f"Provided config should be a dict. Got: '{type(config)}'") + path = os.path.join(save_directory, constants.CONFIG_NAME) + with open(path, "w") as f: + json.dump(config, f) + + _create_model_card(Path(save_directory)) + _create_model_pyproject(Path(save_directory)) + + # learner.export saves the model in `self.path`. + learner.path = Path(save_directory) + os.makedirs(save_directory, exist_ok=True) + try: + learner.export( + fname="model.pkl", + pickle_protocol=DEFAULT_PROTOCOL, + ) + except PicklingError: + raise PicklingError( + "You are using a lambda function, i.e., an anonymous function. `pickle`" + " cannot pickle function objects and requires that all functions have" + " names. One possible solution is to name the function." + ) + + +@validate_hf_hub_args +def from_pretrained_fastai( + repo_id: str, + revision: Optional[str] = None, +): + """ + Load pretrained fastai model from the Hub or from a local directory. + + Args: + repo_id (`str`): + The location where the pickled fastai.Learner is. It can be either of the two: + - Hosted on the Hugging Face Hub. E.g.: 'espejelomar/fatai-pet-breeds-classification' or 'distilgpt2'. + You can add a `revision` by appending `@` at the end of `repo_id`. E.g.: `dbmdz/bert-base-german-cased@main`. + Revision is the specific model version to use. 
Since we use a git-based system for storing models and other
+                artifacts on the Hugging Face Hub, it can be a branch name, a tag name, or a commit id.
+            - Hosted locally. `repo_id` would be a directory containing the pickle and a pyproject.toml
+              indicating the fastai and fastcore versions used to build the `fastai.Learner`. E.g.: `./my_model_directory/`.
+        revision (`str`, *optional*):
+            Revision at which the repo's files are downloaded. See documentation of `snapshot_download`.
+
+    Returns:
+        The `fastai.Learner` model in the `repo_id` repo.
+    """
+    _check_fastai_fastcore_versions()
+
+    # Load the `repo_id` repo.
+    # `snapshot_download` returns the folder where the model was stored.
+    # `cache_dir` will be the default '/root/.cache/huggingface/hub'
+    if not os.path.isdir(repo_id):
+        storage_folder = snapshot_download(
+            repo_id=repo_id,
+            revision=revision,
+            library_name="fastai",
+            library_version=get_fastai_version(),
+        )
+    else:
+        storage_folder = repo_id
+
+    _check_fastai_fastcore_pyproject_versions(storage_folder)
+
+    from fastai.learner import load_learner  # type: ignore
+
+    return load_learner(os.path.join(storage_folder, "model.pkl"))
+
+
+@validate_hf_hub_args
+def push_to_hub_fastai(
+    learner,
+    *,
+    repo_id: str,
+    commit_message: str = "Push FastAI model using huggingface_hub.",
+    private: Optional[bool] = None,
+    token: Optional[str] = None,
+    config: Optional[dict] = None,
+    branch: Optional[str] = None,
+    create_pr: Optional[bool] = None,
+    allow_patterns: Optional[Union[List[str], str]] = None,
+    ignore_patterns: Optional[Union[List[str], str]] = None,
+    delete_patterns: Optional[Union[List[str], str]] = None,
+    api_endpoint: Optional[str] = None,
+):
+    """
+    Upload learner checkpoint files to the Hub.
+
+    Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
+    `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
+    details.
+
+    Args:
+        learner (`Learner`):
+            The `fastai.Learner` you'd like to push to the Hub.
+        repo_id (`str`):
+            The repository id for your model on the Hub in the format of "namespace/repo_name". The namespace can be your individual account or an organization to which you have write access (for example, 'stanfordnlp/stanza-de').
+        commit_message (`str`, *optional*):
+            Message to commit while pushing. Will default to :obj:`"Push FastAI model using huggingface_hub."`.
+        private (`bool`, *optional*):
+            Whether or not the repository created should be private.
+            If `None` (default), the repo will be public unless the organization's default is private.
+        token (`str`, *optional*):
+            The Hugging Face account token to use as HTTP bearer authorization for remote files. If :obj:`None`, the token will be requested via a prompt.
+        config (`dict`, *optional*):
+            Configuration object to be saved alongside the model weights.
+        branch (`str`, *optional*):
+            The git branch on which to push the model. This defaults to
+            the default branch as specified in your repository, which
+            defaults to `"main"`.
+        create_pr (`bool`, *optional*):
+            Whether or not to create a Pull Request from `branch` with that commit.
+            Defaults to `False`.
+        api_endpoint (`str`, *optional*):
+            The API endpoint to use when pushing the model to the hub.
+        allow_patterns (`List[str]` or `str`, *optional*):
+            If provided, only files matching at least one pattern are pushed.
+        ignore_patterns (`List[str]` or `str`, *optional*):
+            If provided, files matching any of the patterns are not pushed.
+        delete_patterns (`List[str]` or `str`, *optional*):
+            If provided, remote files matching any of the patterns will be deleted from the repo.
+
+    Returns:
+        The url of the commit of your model in the given repository.
+
+    > [!TIP]
+    > Raises the following error:
+    >
+    > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+    >   if the user is not logged in to the Hugging Face Hub.
+    """
+    _check_fastai_fastcore_versions()
+    api = HfApi(endpoint=api_endpoint)
+    repo_id = api.create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True).repo_id
+
+    # Push the files to the repo in a single commit
+    with SoftTemporaryDirectory() as tmp:
+        saved_path = Path(tmp) / repo_id
+        _save_pretrained_fastai(learner, saved_path, config=config)
+        return api.upload_folder(
+            repo_id=repo_id,
+            token=token,
+            folder_path=saved_path,
+            commit_message=commit_message,
+            revision=branch,
+            create_pr=create_pr,
+            allow_patterns=allow_patterns,
+            ignore_patterns=ignore_patterns,
+            delete_patterns=delete_patterns,
+        )
diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/file_download.py b/venv/lib/python3.13/site-packages/huggingface_hub/file_download.py
new file mode 100644
index 0000000000000000000000000000000000000000..2cea2da453f8ce5cbb8f3810faeb0d69cf42eba5
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/huggingface_hub/file_download.py
@@ -0,0 +1,1813 @@
+import copy
+import errno
+import inspect
+import os
+import re
+import shutil
+import stat
+import time
+import uuid
+import warnings
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, BinaryIO, Dict, Literal, NoReturn, Optional, Tuple, Union
+from urllib.parse import quote, urlparse
+
+import requests
+
+from . import (
+    __version__,  # noqa: F401 # for backward compatibility
+    constants,
+)
+from ._local_folder import get_local_download_paths, read_download_metadata, write_download_metadata
+from .constants import (
+    HUGGINGFACE_CO_URL_TEMPLATE,  # noqa: F401 # for backward compatibility
+    HUGGINGFACE_HUB_CACHE,  # noqa: F401 # for backward compatibility
+)
+from .errors import (
+    EntryNotFoundError,
+    FileMetadataError,
+    GatedRepoError,
+    HfHubHTTPError,
+    LocalEntryNotFoundError,
+    RepositoryNotFoundError,
+    RevisionNotFoundError,
+)
+from .utils import (
+    OfflineModeIsEnabled,
+    SoftTemporaryDirectory,
+    WeakFileLock,
+    XetFileData,
+    build_hf_headers,
+    get_fastai_version,  # noqa: F401 # for backward compatibility
+    get_fastcore_version,  # noqa: F401 # for backward compatibility
+    get_graphviz_version,  # noqa: F401 # for backward compatibility
+    get_jinja_version,  # noqa: F401 # for backward compatibility
+    get_pydot_version,  # noqa: F401 # for backward compatibility
+    get_tf_version,  # noqa: F401 # for backward compatibility
+    get_torch_version,  # noqa: F401 # for backward compatibility
+    hf_raise_for_status,
+    is_fastai_available,  # noqa: F401 # for backward compatibility
+    is_fastcore_available,  # noqa: F401 # for backward compatibility
+    is_graphviz_available,  # noqa: F401 # for backward compatibility
+    is_jinja_available,  # noqa: F401 # for backward compatibility
+    is_pydot_available,  # noqa: F401 # for backward compatibility
+    is_tf_available,  # noqa: F401 # for backward compatibility
+    is_torch_available,  # noqa: F401 # for backward compatibility
+    logging,
+    parse_xet_file_data_from_response,
+    refresh_xet_connection_info,
+    reset_sessions,
+    tqdm,
+    validate_hf_hub_args,
+)
+from .utils._http import _adjust_range_header, http_backoff
+from .utils._runtime
import _PY_VERSION, is_xet_available # noqa: F401 # for backward compatibility +from .utils._typing import HTTP_METHOD_T +from .utils.sha import sha_fileobj +from .utils.tqdm import _get_progress_bar_context + + +logger = logging.get_logger(__name__) + +# Return value when trying to load a file from cache but the file does not exist in the distant repo. +_CACHED_NO_EXIST = object() +_CACHED_NO_EXIST_T = Any + +# Regex to get filename from a "Content-Disposition" header for CDN-served files +HEADER_FILENAME_PATTERN = re.compile(r'filename="(?P.*?)";') + +# Regex to check if the revision IS directly a commit_hash +REGEX_COMMIT_HASH = re.compile(r"^[0-9a-f]{40}$") + +# Regex to check if the file etag IS a valid sha256 +REGEX_SHA256 = re.compile(r"^[0-9a-f]{64}$") + +_are_symlinks_supported_in_dir: Dict[str, bool] = {} + + +def are_symlinks_supported(cache_dir: Union[str, Path, None] = None) -> bool: + """Return whether the symlinks are supported on the machine. + + Since symlinks support can change depending on the mounted disk, we need to check + on the precise cache folder. By default, the default HF cache directory is checked. + + Args: + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + + Returns: [bool] Whether symlinks are supported in the directory. + """ + # Defaults to HF cache + if cache_dir is None: + cache_dir = constants.HF_HUB_CACHE + cache_dir = str(Path(cache_dir).expanduser().resolve()) # make it unique + + # Check symlink compatibility only once (per cache directory) at first time use + if cache_dir not in _are_symlinks_supported_in_dir: + _are_symlinks_supported_in_dir[cache_dir] = True + + os.makedirs(cache_dir, exist_ok=True) + with SoftTemporaryDirectory(dir=cache_dir) as tmpdir: + src_path = Path(tmpdir) / "dummy_file_src" + src_path.touch() + dst_path = Path(tmpdir) / "dummy_file_dst" + + # Relative source path as in `_create_symlink`` + relative_src = os.path.relpath(src_path, start=os.path.dirname(dst_path)) + try: + os.symlink(relative_src, dst_path) + except OSError: + # Likely running on Windows + _are_symlinks_supported_in_dir[cache_dir] = False + + if not constants.HF_HUB_DISABLE_SYMLINKS_WARNING: + message = ( + "`huggingface_hub` cache-system uses symlinks by default to" + " efficiently store duplicated files but your machine does not" + f" support them in {cache_dir}. Caching files will still work" + " but in a degraded version that might require more space on" + " your disk. This warning can be disabled by setting the" + " `HF_HUB_DISABLE_SYMLINKS_WARNING` environment variable. For" + " more details, see" + " https://huggingface.co/docs/huggingface_hub/how-to-cache#limitations." + ) + if os.name == "nt": + message += ( + "\nTo support symlinks on Windows, you either need to" + " activate Developer Mode or to run Python as an" + " administrator. In order to activate developer mode," + " see this article:" + " https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development" + ) + warnings.warn(message) + + return _are_symlinks_supported_in_dir[cache_dir] + + +@dataclass(frozen=True) +class HfFileMetadata: + """Data structure containing information about a file versioned on the Hub. + + Returned by [`get_hf_file_metadata`] based on a URL. + + Args: + commit_hash (`str`, *optional*): + The commit_hash related to the file. + etag (`str`, *optional*): + Etag of the file on the server. + location (`str`): + Location where to download the file. Can be a Hub url or not (CDN). 
+ size (`size`): + Size of the file. In case of an LFS file, contains the size of the actual + LFS file, not the pointer. + xet_file_data (`XetFileData`, *optional*): + Xet information for the file. This is only set if the file is stored using Xet storage. + """ + + commit_hash: Optional[str] + etag: Optional[str] + location: str + size: Optional[int] + xet_file_data: Optional[XetFileData] + + +@validate_hf_hub_args +def hf_hub_url( + repo_id: str, + filename: str, + *, + subfolder: Optional[str] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + endpoint: Optional[str] = None, +) -> str: + """Construct the URL of a file from the given information. + + The resolved address can either be a huggingface.co-hosted url, or a link to + Cloudfront (a Content Delivery Network, or CDN) for large files which are + more than a few MBs. + + Args: + repo_id (`str`): + A namespace (user or an organization) name and a repo name separated + by a `/`. + filename (`str`): + The name of the file in the repo. + subfolder (`str`, *optional*): + An optional value corresponding to a folder inside the repo. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if downloading from a dataset or space, + `None` or `"model"` if downloading from a model. Default is `None`. + revision (`str`, *optional*): + An optional Git revision id which can be a branch name, a tag, or a + commit hash. + + Example: + + ```python + >>> from huggingface_hub import hf_hub_url + + >>> hf_hub_url( + ... repo_id="julien-c/EsperBERTo-small", filename="pytorch_model.bin" + ... ) + 'https://huggingface.co/julien-c/EsperBERTo-small/resolve/main/pytorch_model.bin' + ``` + + > [!TIP] + > Notes: + > + > Cloudfront is replicated over the globe so downloads are way faster for + > the end user (and it also lowers our bandwidth costs). + > + > Cloudfront aggressively caches files by default (default TTL is 24 + > hours), however this is not an issue here because we implement a + > git-based versioning system on huggingface.co, which means that we store + > the files on S3/Cloudfront in a content-addressable way (i.e., the file + > name is its hash). Using content-addressable filenames means cache can't + > ever be stale. + > + > In terms of client-side caching from this library, we base our caching + > on the objects' entity tag (`ETag`), which is an identifier of a + > specific version of a resource [1]_. An object's ETag is: its git-sha1 + > if stored in git, or its sha256 if stored in git-lfs. 
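+
+    A second, illustrative example with `subfolder` and `revision` (assuming a `v1.0` tag
+    exists on the repo; the URL follows directly from the code below, which prepends the
+    subfolder to the filename and quotes the revision):
+
+    ```python
+    >>> hf_hub_url(
+    ...     repo_id="julien-c/EsperBERTo-small",
+    ...     filename="pytorch_model.bin",
+    ...     subfolder="checkpoints",
+    ...     revision="v1.0",
+    ... )
+    'https://huggingface.co/julien-c/EsperBERTo-small/resolve/v1.0/checkpoints/pytorch_model.bin'
+    ```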
+ + References: + + - [1] https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag + """ + if subfolder == "": + subfolder = None + if subfolder is not None: + filename = f"{subfolder}/{filename}" + + if repo_type not in constants.REPO_TYPES: + raise ValueError("Invalid repo type") + + if repo_type in constants.REPO_TYPES_URL_PREFIXES: + repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id + + if revision is None: + revision = constants.DEFAULT_REVISION + url = HUGGINGFACE_CO_URL_TEMPLATE.format( + repo_id=repo_id, revision=quote(revision, safe=""), filename=quote(filename) + ) + # Update endpoint if provided + if endpoint is not None and url.startswith(constants.ENDPOINT): + url = endpoint + url[len(constants.ENDPOINT) :] + return url + + +def _request_wrapper( + method: HTTP_METHOD_T, url: str, *, follow_relative_redirects: bool = False, **params +) -> requests.Response: + """Wrapper around requests methods to follow relative redirects if `follow_relative_redirects=True` even when + `allow_redirection=False`. + + A backoff mechanism retries the HTTP call on 5xx errors and network errors. + + Args: + method (`str`): + HTTP method, such as 'GET' or 'HEAD'. + url (`str`): + The URL of the resource to fetch. + follow_relative_redirects (`bool`, *optional*, defaults to `False`) + If True, relative redirection (redirection to the same site) will be resolved even when `allow_redirection` + kwarg is set to False. Useful when we want to follow a redirection to a renamed repository without + following redirection to a CDN. + **params (`dict`, *optional*): + Params to pass to `requests.request`. + """ + # Recursively follow relative redirects + if follow_relative_redirects: + response = _request_wrapper( + method=method, + url=url, + follow_relative_redirects=False, + **params, + ) + + # If redirection, we redirect only relative paths. + # This is useful in case of a renamed repository. + if 300 <= response.status_code <= 399: + parsed_target = urlparse(response.headers["Location"]) + if parsed_target.netloc == "": + # This means it is a relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # We want to follow this relative redirect ! + # + # Highly inspired by `resolve_redirects` from requests library. + # See https://github.com/psf/requests/blob/main/requests/sessions.py#L159 + next_url = urlparse(url)._replace(path=parsed_target.path).geturl() + return _request_wrapper(method=method, url=next_url, follow_relative_redirects=True, **params) + return response + + # Perform request and return if status_code is not in the retry list. + response = http_backoff(method=method, url=url, **params) + hf_raise_for_status(response) + return response + + +def _get_file_length_from_http_response(response: requests.Response) -> Optional[int]: + """ + Get the length of the file from the HTTP response headers. + + This function extracts the file size from the HTTP response headers, either from the + `Content-Range` or `Content-Length` header, if available (in that order). + + Args: + response (`requests.Response`): + The HTTP response object. + + Returns: + `int` or `None`: The length of the file in bytes, or None if not available. + """ + + # If HTTP response contains compressed body (e.g. gzip), the `Content-Length` header will + # contain the length of the compressed body, not the uncompressed file size. 
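+    # (Illustrative: a response with "Content-Encoding: gzip" and "Content-Length: 1234" reports the
+    # size of the compressed payload, which would not match the decoded file written to disk.)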
+    # And at the start of transmission there's no way to know the uncompressed file size for gzip,
+    # thus we return None in that case.
+    content_encoding = response.headers.get("Content-Encoding", "identity").lower()
+    if content_encoding != "identity":
+        # gzip/br/deflate/zstd etc
+        return None
+
+    content_range = response.headers.get("Content-Range")
+    if content_range is not None:
+        return int(content_range.rsplit("/")[-1])
+
+    content_length = response.headers.get("Content-Length")
+    if content_length is not None:
+        return int(content_length)
+
+    return None
+
+
+def http_get(
+    url: str,
+    temp_file: BinaryIO,
+    *,
+    proxies: Optional[Dict] = None,
+    resume_size: int = 0,
+    headers: Optional[Dict[str, Any]] = None,
+    expected_size: Optional[int] = None,
+    displayed_filename: Optional[str] = None,
+    _nb_retries: int = 5,
+    _tqdm_bar: Optional[tqdm] = None,
+) -> None:
+    """
+    Download a remote file. Does not gobble up errors, and raises errors tailored to the Hugging Face Hub.
+
+    If a ConnectionError (SSLError) or ReadTimeout happens while streaming data from the server, it is most likely a
+    transient error (network outage?). We log a warning message and try to resume the download a few times before
+    giving up. The method gives up after 5 attempts if no new data has been received from the server.
+
+    Args:
+        url (`str`):
+            The URL of the file to download.
+        temp_file (`BinaryIO`):
+            The file-like object where to save the file.
+        proxies (`dict`, *optional*):
+            Dictionary mapping protocol to the URL of the proxy passed to `requests.request`.
+        resume_size (`int`, *optional*):
+            The number of bytes already downloaded. If set to 0 (default), the whole file is downloaded. If set to a
+            positive number, the download will resume at the given position.
+        headers (`dict`, *optional*):
+            Dictionary of HTTP Headers to send with the request.
+        expected_size (`int`, *optional*):
+            The expected size of the file to download. If set, the download will raise an error if the size of the
+            received content is different from the expected one.
+        displayed_filename (`str`, *optional*):
+            The filename of the file that is being downloaded. Value is used only to display a nice progress bar. If
+            not set, the filename is guessed from the URL or the `Content-Disposition` header.
+    """
+    if expected_size is not None and resume_size == expected_size:
+        # If the file is already fully downloaded, we don't need to download it again.
+        return
+
+    has_custom_range_header = headers is not None and any(h.lower() == "range" for h in headers)
+    hf_transfer = None
+    if constants.HF_HUB_ENABLE_HF_TRANSFER:
+        if resume_size != 0:
+            warnings.warn("'hf_transfer' does not support `resume_size`: falling back to regular download method")
+        elif proxies is not None:
+            warnings.warn("'hf_transfer' does not support `proxies`: falling back to regular download method")
+        elif has_custom_range_header:
+            warnings.warn("'hf_transfer' ignores custom 'Range' headers; falling back to regular download method")
+        else:
+            try:
+                import hf_transfer  # type: ignore[no-redef]
+            except ImportError:
+                raise ValueError(
+                    "Fast download using 'hf_transfer' is enabled"
+                    " (HF_HUB_ENABLE_HF_TRANSFER=1) but 'hf_transfer' package is not"
+                    " available in your environment. Try `pip install hf_transfer`."
+ ) + + initial_headers = headers + headers = copy.deepcopy(headers) or {} + if resume_size > 0: + headers["Range"] = _adjust_range_header(headers.get("Range"), resume_size) + elif expected_size and expected_size > constants.MAX_HTTP_DOWNLOAD_SIZE: + # Any files over 50GB will not be available through basic http request. + # Setting the range header to 0-0 will force the server to return the file size in the Content-Range header. + # Since hf_transfer splits the download into chunks, the process will succeed afterwards. + if hf_transfer: + headers["Range"] = "bytes=0-0" + else: + raise ValueError( + "The file is too large to be downloaded using the regular download method. Use `hf_transfer` or `hf_xet` instead." + " Try `pip install hf_transfer` or `pip install hf_xet`." + ) + + r = _request_wrapper( + method="GET", url=url, stream=True, proxies=proxies, headers=headers, timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT + ) + + hf_raise_for_status(r) + total: Optional[int] = _get_file_length_from_http_response(r) + + if displayed_filename is None: + displayed_filename = url + content_disposition = r.headers.get("Content-Disposition") + if content_disposition is not None: + match = HEADER_FILENAME_PATTERN.search(content_disposition) + if match is not None: + # Means file is on CDN + displayed_filename = match.groupdict()["filename"] + + # Truncate filename if too long to display + if len(displayed_filename) > 40: + displayed_filename = f"(…){displayed_filename[-40:]}" + + consistency_error_message = ( + f"Consistency check failed: file should be of size {expected_size} but has size" + f" {{actual_size}} ({displayed_filename}).\nThis is usually due to network issues while downloading the file." + " Please retry with `force_download=True`." + ) + progress_cm = _get_progress_bar_context( + desc=displayed_filename, + log_level=logger.getEffectiveLevel(), + total=total, + initial=resume_size, + name="huggingface_hub.http_get", + _tqdm_bar=_tqdm_bar, + ) + + with progress_cm as progress: + if hf_transfer and total is not None and total > 5 * constants.DOWNLOAD_CHUNK_SIZE: + supports_callback = "callback" in inspect.signature(hf_transfer.download).parameters + if not supports_callback: + warnings.warn( + "You are using an outdated version of `hf_transfer`. " + "Consider upgrading to latest version to enable progress bars " + "using `pip install -U hf_transfer`." + ) + try: + hf_transfer.download( + url=url, + filename=temp_file.name, + max_files=constants.HF_TRANSFER_CONCURRENCY, + chunk_size=constants.DOWNLOAD_CHUNK_SIZE, + headers=initial_headers, + parallel_failures=3, + max_retries=5, + **({"callback": progress.update} if supports_callback else {}), + ) + except Exception as e: + raise RuntimeError( + "An error occurred while downloading using `hf_transfer`. Consider" + " disabling HF_HUB_ENABLE_HF_TRANSFER for better error handling." + ) from e + if not supports_callback: + progress.update(total) + if expected_size is not None and expected_size != os.path.getsize(temp_file.name): + raise EnvironmentError( + consistency_error_message.format( + actual_size=os.path.getsize(temp_file.name), + ) + ) + return + new_resume_size = resume_size + try: + for chunk in r.iter_content(chunk_size=constants.DOWNLOAD_CHUNK_SIZE): + if chunk: # filter out keep-alive new chunks + progress.update(len(chunk)) + temp_file.write(chunk) + new_resume_size += len(chunk) + # Some data has been downloaded from the server so we reset the number of retries. 
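+                    # (Illustrative consequence: the retry budget below only bounds *consecutive*
+                    # failed reads, not the total number of resumptions over the whole download.)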
+                    _nb_retries = 5
+        except (requests.ConnectionError, requests.ReadTimeout) as e:
+            # If a ConnectionError (SSLError) or ReadTimeout happens while streaming data from the server, it is most
+            # likely a transient error (network outage?). We log a warning message and try to resume the download a few
+            # times before giving up. The retry mechanism is basic but should be enough in most cases.
+            if _nb_retries <= 0:
+                logger.warning("Error while downloading from %s: %s\nMax retries exceeded.", url, str(e))
+                raise
+            logger.warning("Error while downloading from %s: %s\nTrying to resume download...", url, str(e))
+            time.sleep(1)
+            reset_sessions()  # In case of SSLError it's best to reset the shared requests.Session objects
+            return http_get(
+                url=url,
+                temp_file=temp_file,
+                proxies=proxies,
+                resume_size=new_resume_size,
+                headers=initial_headers,
+                expected_size=expected_size,
+                _nb_retries=_nb_retries - 1,
+                _tqdm_bar=_tqdm_bar,
+            )
+
+        if expected_size is not None and expected_size != temp_file.tell():
+            raise EnvironmentError(
+                consistency_error_message.format(
+                    actual_size=temp_file.tell(),
+                )
+            )
+
+
+def xet_get(
+    *,
+    incomplete_path: Path,
+    xet_file_data: XetFileData,
+    headers: Dict[str, str],
+    expected_size: Optional[int] = None,
+    displayed_filename: Optional[str] = None,
+    _tqdm_bar: Optional[tqdm] = None,
+) -> None:
+    """
+    Download a file using Xet storage service.
+
+    Args:
+        incomplete_path (`Path`):
+            The path to the file to download.
+        xet_file_data (`XetFileData`):
+            The file metadata needed to make the request to the xet storage service.
+        headers (`Dict[str, str]`):
+            The headers to send to the xet storage service.
+        expected_size (`int`, *optional*):
+            The expected size of the file to download. If set, the download will raise an error if the size of the
+            received content is different from the expected one.
+        displayed_filename (`str`, *optional*):
+            The filename of the file that is being downloaded. Value is used only to display a nice progress bar. If
+            not set, the filename is guessed from the URL or the `Content-Disposition` header.
+
+    **How it works:**
+    The file download system uses Xet storage, which is a content-addressable storage system that breaks files into chunks
+    for efficient storage and transfer.
+
+    `hf_xet.download_files` manages downloading files by:
+    - Taking a list of files to download (each with its unique content hash)
+    - Connecting to a storage server (CAS server) that knows how files are chunked
+    - Using authentication to ensure secure access
+    - Providing progress updates during download
+
+    Authentication works by regularly refreshing access tokens through `refresh_xet_connection_info` to maintain a valid
+    connection to the storage server.
+
+    The download process works like this:
+    1. Create a local cache folder at `~/.cache/huggingface/xet/chunk-cache` to store reusable file chunks
+    2. Download files in parallel:
+        2.1. Prepare to write the file to disk
+        2.2. Ask the server "how is this file split into chunks?" using the file's unique hash
+            The server responds with:
+            - Which chunks make up the complete file
+            - Where each chunk can be downloaded from
+        2.3.
For each needed chunk: + - Checks if we already have it in our local cache + - If not, download it from cloud storage (S3) + - Save it to cache for future use + - Assemble the chunks in order to recreate the original file + + """ + try: + from hf_xet import PyXetDownloadInfo, download_files # type: ignore[no-redef] + except ImportError: + raise ValueError( + "To use optimized download using Xet storage, you need to install the hf_xet package. " + 'Try `pip install "huggingface_hub[hf_xet]"` or `pip install hf_xet`.' + ) + + connection_info = refresh_xet_connection_info(file_data=xet_file_data, headers=headers) + + def token_refresher() -> Tuple[str, int]: + connection_info = refresh_xet_connection_info(file_data=xet_file_data, headers=headers) + if connection_info is None: + raise ValueError("Failed to refresh token using xet metadata.") + return connection_info.access_token, connection_info.expiration_unix_epoch + + xet_download_info = [ + PyXetDownloadInfo( + destination_path=str(incomplete_path.absolute()), hash=xet_file_data.file_hash, file_size=expected_size + ) + ] + + if not displayed_filename: + displayed_filename = incomplete_path.name + + # Truncate filename if too long to display + if len(displayed_filename) > 40: + displayed_filename = f"{displayed_filename[:40]}(…)" + + progress_cm = _get_progress_bar_context( + desc=displayed_filename, + log_level=logger.getEffectiveLevel(), + total=expected_size, + initial=0, + name="huggingface_hub.xet_get", + _tqdm_bar=_tqdm_bar, + ) + + with progress_cm as progress: + + def progress_updater(progress_bytes: float): + progress.update(progress_bytes) + + download_files( + xet_download_info, + endpoint=connection_info.endpoint, + token_info=(connection_info.access_token, connection_info.expiration_unix_epoch), + token_refresher=token_refresher, + progress_updater=[progress_updater], + ) + + +def _normalize_etag(etag: Optional[str]) -> Optional[str]: + """Normalize ETag HTTP header, so it can be used to create nice filepaths. + + The HTTP spec allows two forms of ETag: + ETag: W/"" + ETag: "" + + For now, we only expect the second form from the server, but we want to be future-proof so we support both. For + more context, see `TestNormalizeEtag` tests and https://github.com/huggingface/huggingface_hub/pull/1428. + + Args: + etag (`str`, *optional*): HTTP header + + Returns: + `str` or `None`: string that can be used as a nice directory name. + Returns `None` if input is None. + """ + if etag is None: + return None + return etag.lstrip("W/").strip('"') + + +def _create_relative_symlink(src: str, dst: str, new_blob: bool = False) -> None: + """Alias method used in `transformers` conversion script.""" + return _create_symlink(src=src, dst=dst, new_blob=new_blob) + + +def _create_symlink(src: str, dst: str, new_blob: bool = False) -> None: + """Create a symbolic link named dst pointing to src. + + By default, it will try to create a symlink using a relative path. Relative paths have 2 advantages: + - If the cache_folder is moved (example: back-up on a shared drive), relative paths within the cache folder will + not break. + - Relative paths seems to be better handled on Windows. Issue was reported 3 times in less than a week when + changing from relative to absolute paths. See https://github.com/huggingface/huggingface_hub/issues/1398, + https://github.com/huggingface/diffusers/issues/2729 and https://github.com/huggingface/transformers/pull/22228. + NOTE: The issue with absolute paths doesn't happen on admin mode. 
+ When creating a symlink from the cache to a local folder, it is possible that a relative path cannot be created. + This happens when paths are not on the same volume. In that case, we use absolute paths. + + + The result layout looks something like + └── [ 128] snapshots + ├── [ 128] 2439f60ef33a0d46d85da5001d52aeda5b00ce9f + │ ├── [ 52] README.md -> ../../../blobs/d7edf6bd2a681fb0175f7735299831ee1b22b812 + │ └── [ 76] pytorch_model.bin -> ../../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + + If symlinks cannot be created on this platform (most likely to be Windows), the workaround is to avoid symlinks by + having the actual file in `dst`. If it is a new file (`new_blob=True`), we move it to `dst`. If it is not a new file + (`new_blob=False`), we don't know if the blob file is already referenced elsewhere. To avoid breaking existing + cache, the file is duplicated on the disk. + + In case symlinks are not supported, a warning message is displayed to the user once when loading `huggingface_hub`. + The warning message can be disabled with the `DISABLE_SYMLINKS_WARNING` environment variable. + """ + try: + os.remove(dst) + except OSError: + pass + + abs_src = os.path.abspath(os.path.expanduser(src)) + abs_dst = os.path.abspath(os.path.expanduser(dst)) + abs_dst_folder = os.path.dirname(abs_dst) + + # Use relative_dst in priority + try: + relative_src = os.path.relpath(abs_src, abs_dst_folder) + except ValueError: + # Raised on Windows if src and dst are not on the same volume. This is the case when creating a symlink to a + # local_dir instead of within the cache directory. + # See https://docs.python.org/3/library/os.path.html#os.path.relpath + relative_src = None + + try: + commonpath = os.path.commonpath([abs_src, abs_dst]) + _support_symlinks = are_symlinks_supported(commonpath) + except ValueError: + # Raised if src and dst are not on the same volume. Symlinks will still work on Linux/Macos. + # See https://docs.python.org/3/library/os.path.html#os.path.commonpath + _support_symlinks = os.name != "nt" + except PermissionError: + # Permission error means src and dst are not in the same volume (e.g. destination path has been provided + # by the user via `local_dir`. Let's test symlink support there) + _support_symlinks = are_symlinks_supported(abs_dst_folder) + except OSError as e: + # OS error (errno=30) means that the commonpath is readonly on Linux/MacOS. + if e.errno == errno.EROFS: + _support_symlinks = are_symlinks_supported(abs_dst_folder) + else: + raise + + # Symlinks are supported => let's create a symlink. + if _support_symlinks: + src_rel_or_abs = relative_src or abs_src + logger.debug(f"Creating pointer from {src_rel_or_abs} to {abs_dst}") + try: + os.symlink(src_rel_or_abs, abs_dst) + return + except FileExistsError: + if os.path.islink(abs_dst) and os.path.realpath(abs_dst) == os.path.realpath(abs_src): + # `abs_dst` already exists and is a symlink to the `abs_src` blob. It is most likely that the file has + # been cached twice concurrently (exactly between `os.remove` and `os.symlink`). Do nothing. + return + else: + # Very unlikely to happen. Means a file `dst` has been created exactly between `os.remove` and + # `os.symlink` and is not a symlink to the `abs_src` blob file. Raise exception. + raise + except PermissionError: + # Permission error means src and dst are not in the same volume (e.g. download to local dir) and symlink + # is supported on both volumes but not between them. Let's just make a hard copy in that case. 
+ pass + + # Symlinks are not supported => let's move or copy the file. + if new_blob: + logger.info(f"Symlink not supported. Moving file from {abs_src} to {abs_dst}") + shutil.move(abs_src, abs_dst, copy_function=_copy_no_matter_what) + else: + logger.info(f"Symlink not supported. Copying file from {abs_src} to {abs_dst}") + shutil.copyfile(abs_src, abs_dst) + + +def _cache_commit_hash_for_specific_revision(storage_folder: str, revision: str, commit_hash: str) -> None: + """Cache reference between a revision (tag, branch or truncated commit hash) and the corresponding commit hash. + + Does nothing if `revision` is already a proper `commit_hash` or reference is already cached. + """ + if revision != commit_hash: + ref_path = Path(storage_folder) / "refs" / revision + ref_path.parent.mkdir(parents=True, exist_ok=True) + if not ref_path.exists() or commit_hash != ref_path.read_text(): + # Update ref only if has been updated. Could cause useless error in case + # repo is already cached and user doesn't have write access to cache folder. + # See https://github.com/huggingface/huggingface_hub/issues/1216. + ref_path.write_text(commit_hash) + + +@validate_hf_hub_args +def repo_folder_name(*, repo_id: str, repo_type: str) -> str: + """Return a serialized version of a hf.co repo name and type, safe for disk storage + as a single non-nested folder. + + Example: models--julien-c--EsperBERTo-small + """ + # remove all `/` occurrences to correctly convert repo to directory name + parts = [f"{repo_type}s", *repo_id.split("/")] + return constants.REPO_ID_SEPARATOR.join(parts) + + +def _check_disk_space(expected_size: int, target_dir: Union[str, Path]) -> None: + """Check disk usage and log a warning if there is not enough disk space to download the file. + + Args: + expected_size (`int`): + The expected size of the file in bytes. + target_dir (`str`): + The directory where the file will be stored after downloading. + """ + + target_dir = Path(target_dir) # format as `Path` + for path in [target_dir] + list(target_dir.parents): # first check target_dir, then each parents one by one + try: + target_dir_free = shutil.disk_usage(path).free + if target_dir_free < expected_size: + warnings.warn( + "Not enough free disk space to download the file. " + f"The expected file size is: {expected_size / 1e6:.2f} MB. " + f"The target location {target_dir} only has {target_dir_free / 1e6:.2f} MB free disk space." + ) + return + except OSError: # raise on anything: file does not exist or space disk cannot be checked + pass + + +@validate_hf_hub_args +def hf_hub_download( + repo_id: str, + filename: str, + *, + subfolder: Optional[str] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + library_name: Optional[str] = None, + library_version: Optional[str] = None, + cache_dir: Union[str, Path, None] = None, + local_dir: Union[str, Path, None] = None, + user_agent: Union[Dict, str, None] = None, + force_download: bool = False, + proxies: Optional[Dict] = None, + etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT, + token: Union[bool, str, None] = None, + local_files_only: bool = False, + headers: Optional[Dict[str, str]] = None, + endpoint: Optional[str] = None, + resume_download: Optional[bool] = None, + force_filename: Optional[str] = None, + local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto", +) -> str: + """Download a given file if it's not already present in the local cache. 
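+
+    A minimal usage sketch (illustrative; the exact returned path depends on your cache location,
+    and the commit hash is elided):
+
+    ```python
+    >>> from huggingface_hub import hf_hub_download
+    >>> hf_hub_download(repo_id="julien-c/EsperBERTo-small", filename="pytorch_model.bin")
+    '(...)/models--julien-c--EsperBERTo-small/snapshots/(...)/pytorch_model.bin'
+    ```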
+ + The new cache file layout looks like this: + - The cache directory contains one subfolder per repo_id (namespaced by repo type) + - inside each repo folder: + - refs is a list of the latest known revision => commit_hash pairs + - blobs contains the actual file blobs (identified by their git-sha or sha256, depending on + whether they're LFS files or not) + - snapshots contains one subfolder per commit, each "commit" contains the subset of the files + that have been resolved at that particular commit. Each filename is a symlink to the blob + at that particular commit. + + ``` + [ 96] . + └── [ 160] models--julien-c--EsperBERTo-small + ├── [ 160] blobs + │ ├── [321M] 403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + │ ├── [ 398] 7cb18dc9bafbfcf74629a4b760af1b160957a83e + │ └── [1.4K] d7edf6bd2a681fb0175f7735299831ee1b22b812 + ├── [ 96] refs + │ └── [ 40] main + └── [ 128] snapshots + ├── [ 128] 2439f60ef33a0d46d85da5001d52aeda5b00ce9f + │ ├── [ 52] README.md -> ../../blobs/d7edf6bd2a681fb0175f7735299831ee1b22b812 + │ └── [ 76] pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + └── [ 128] bbc77c8132af1cc5cf678da3f1ddf2de43606d48 + ├── [ 52] README.md -> ../../blobs/7cb18dc9bafbfcf74629a4b760af1b160957a83e + └── [ 76] pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + ``` + + If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this + option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir` + to store some metadata related to the downloaded files. While this mechanism is not as robust as the main + cache-system, it's optimized for regularly pulling the latest version of a repository. + + Args: + repo_id (`str`): + A user or an organization name and a repo name separated by a `/`. + filename (`str`): + The name of the file in the repo. + subfolder (`str`, *optional*): + An optional value corresponding to a folder inside the model repo. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if downloading from a dataset or space, + `None` or `"model"` if downloading from a model. Default is `None`. + revision (`str`, *optional*): + An optional Git revision id which can be a branch name, a tag, or a + commit hash. + library_name (`str`, *optional*): + The name of the library to which the object corresponds. + library_version (`str`, *optional*): + The version of the library. + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + local_dir (`str` or `Path`, *optional*): + If provided, the downloaded file will be placed under this directory. + user_agent (`dict`, `str`, *optional*): + The user-agent info in the form of a dictionary or a string. + force_download (`bool`, *optional*, defaults to `False`): + Whether the file should be downloaded even if it already exists in + the local cache. + proxies (`dict`, *optional*): + Dictionary mapping protocol to the URL of the proxy passed to + `requests.request`. + etag_timeout (`float`, *optional*, defaults to `10`): + When fetching ETag, how many seconds to wait for the server to send + data before giving up which is passed to `requests.request`. + token (`str`, `bool`, *optional*): + A token to be used for the download. + - If `True`, the token is read from the HuggingFace config + folder. + - If a string, it's used as the authentication token. 
+        local_files_only (`bool`, *optional*, defaults to `False`):
+            If `True`, avoid downloading the file and return the path to the
+            local cached file if it exists.
+        headers (`dict`, *optional*):
+            Additional headers to be sent with the request.
+
+    Returns:
+        `str`: Local path of the file, or, if networking is off, the last version of the file cached on disk.
+
+    Raises:
+        [`~utils.RepositoryNotFoundError`]
+            If the repository to download from cannot be found. This may be because it doesn't exist,
+            or because it is set to `private` and you do not have access.
+        [`~utils.RevisionNotFoundError`]
+            If the revision to download from cannot be found.
+        [`~utils.EntryNotFoundError`]
+            If the file to download cannot be found.
+        [`~utils.LocalEntryNotFoundError`]
+            If the network is disabled or unavailable and the file is not found in the cache.
+        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+            If `token=True` but the token cannot be found.
+        [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
+            If the ETag cannot be determined.
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If some parameter value is invalid.
+
+    """
+    if constants.HF_HUB_ETAG_TIMEOUT != constants.DEFAULT_ETAG_TIMEOUT:
+        # Respect the environment variable over the user-provided value
+        etag_timeout = constants.HF_HUB_ETAG_TIMEOUT
+
+    if force_filename is not None:
+        warnings.warn(
+            "The `force_filename` parameter is deprecated as a new caching system, "
+            "which keeps the filenames as they are on the Hub, is now in place.",
+            FutureWarning,
+        )
+    if resume_download is not None:
+        warnings.warn(
+            "`resume_download` is deprecated and will be removed in version 1.0.0. "
+            "Downloads always resume when possible. "
+            "If you want to force a new download, use `force_download=True`.",
+            FutureWarning,
+        )
+
+    if cache_dir is None:
+        cache_dir = constants.HF_HUB_CACHE
+    if revision is None:
+        revision = constants.DEFAULT_REVISION
+    if isinstance(cache_dir, Path):
+        cache_dir = str(cache_dir)
+    if isinstance(local_dir, Path):
+        local_dir = str(local_dir)
+
+    if subfolder == "":
+        subfolder = None
+    if subfolder is not None:
+        # This is used to create a URL, and not a local path, hence the forward slash.
+        filename = f"{subfolder}/{filename}"
+
+    if repo_type is None:
+        repo_type = "model"
+    if repo_type not in constants.REPO_TYPES:
+        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are: {str(constants.REPO_TYPES)}")
+
+    hf_headers = build_hf_headers(
+        token=token,
+        library_name=library_name,
+        library_version=library_version,
+        user_agent=user_agent,
+        headers=headers,
+    )
+
+    if local_dir is not None:
+        if local_dir_use_symlinks != "auto":
+            warnings.warn(
+                "`local_dir_use_symlinks` parameter is deprecated and will be ignored. "
+                "The process to download files to a local folder has been updated and does "
+                "not rely on symlinks anymore. You only need to pass a destination folder "
+                "as `local_dir`.\n"
+                "For more details, check out https://huggingface.co/docs/huggingface_hub/main/en/guides/download#download-files-to-local-folder."
+            )
+
+        return _hf_hub_download_to_local_dir(
+            # Destination
+            local_dir=local_dir,
+            # File info
+            repo_id=repo_id,
+            repo_type=repo_type,
+            filename=filename,
+            revision=revision,
+            # HTTP info
+            endpoint=endpoint,
+            etag_timeout=etag_timeout,
+            headers=hf_headers,
+            proxies=proxies,
+            token=token,
+            # Additional options
+            cache_dir=cache_dir,
+            force_download=force_download,
+            local_files_only=local_files_only,
+        )
+    else:
+        return _hf_hub_download_to_cache_dir(
+            # Destination
+            cache_dir=cache_dir,
+            # File info
+            repo_id=repo_id,
+            filename=filename,
+            repo_type=repo_type,
+            revision=revision,
+            # HTTP info
+            endpoint=endpoint,
+            etag_timeout=etag_timeout,
+            headers=hf_headers,
+            proxies=proxies,
+            token=token,
+            # Additional options
+            local_files_only=local_files_only,
+            force_download=force_download,
+        )
+
+
+def _hf_hub_download_to_cache_dir(
+    *,
+    # Destination
+    cache_dir: str,
+    # File info
+    repo_id: str,
+    filename: str,
+    repo_type: str,
+    revision: str,
+    # HTTP info
+    endpoint: Optional[str],
+    etag_timeout: float,
+    headers: Dict[str, str],
+    proxies: Optional[Dict],
+    token: Optional[Union[bool, str]],
+    # Additional options
+    local_files_only: bool,
+    force_download: bool,
+) -> str:
+    """Download a given file to a cache folder, if not already present.
+
+    Method should not be called directly. Please use `hf_hub_download` instead.
+    """
+    locks_dir = os.path.join(cache_dir, ".locks")
+    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))
+
+    # cross platform transcription of filename, to be used as a local file path.
+    relative_filename = os.path.join(*filename.split("/"))
+    if os.name == "nt":
+        if relative_filename.startswith("..\\") or "\\..\\" in relative_filename:
+            raise ValueError(
+                f"Invalid filename: cannot handle filename '{relative_filename}' on Windows. Please ask the repository"
+                " owner to rename this file."
+            )
+
+    # if user provides a commit_hash and they already have the file on disk, shortcut everything.
+    if REGEX_COMMIT_HASH.match(revision):
+        pointer_path = _get_pointer_path(storage_folder, revision, relative_filename)
+        if os.path.exists(pointer_path) and not force_download:
+            return pointer_path
+
+    # Try to get metadata (etag, commit_hash, url, size) from the server.
+    # If we can't, the error raised by the HEAD call is returned instead.
+    (url_to_download, etag, commit_hash, expected_size, xet_file_data, head_call_error) = _get_metadata_or_catch_error(
+        repo_id=repo_id,
+        filename=filename,
+        repo_type=repo_type,
+        revision=revision,
+        endpoint=endpoint,
+        proxies=proxies,
+        etag_timeout=etag_timeout,
+        headers=headers,
+        token=token,
+        local_files_only=local_files_only,
+        storage_folder=storage_folder,
+        relative_filename=relative_filename,
+    )
+
+    # etag can be None for several reasons:
+    # 1. we passed local_files_only.
+    # 2. we don't have a connection
+    # 3. Hub is down (HTTP 500, 503, 504)
+    # 4. repo is not found (for example private or gated) and an invalid or missing token was sent
+    # 5. Hub is blocked by a firewall or proxy is not set correctly.
+    # => Try to get the last downloaded one from the specified revision.
+    #
+    # If the specified revision is a commit hash, look inside "snapshots".
+    # If the specified revision is a branch or tag, look inside "refs".
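+    # (Illustration, assuming the default cache layout documented in `hf_hub_download`:
+    # "<storage_folder>/refs/<revision>" stores the commit hash for a branch or tag, and
+    # "<storage_folder>/snapshots/<commit_hash>/<relative_filename>" is the pointer file
+    # that is ultimately returned.)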
+ if head_call_error is not None: + # Couldn't make a HEAD call => let's try to find a local file + if not force_download: + commit_hash = None + if REGEX_COMMIT_HASH.match(revision): + commit_hash = revision + else: + ref_path = os.path.join(storage_folder, "refs", revision) + if os.path.isfile(ref_path): + with open(ref_path) as f: + commit_hash = f.read() + + # Return pointer file if exists + if commit_hash is not None: + pointer_path = _get_pointer_path(storage_folder, commit_hash, relative_filename) + if os.path.exists(pointer_path) and not force_download: + return pointer_path + + # Otherwise, raise appropriate error + _raise_on_head_call_error(head_call_error, force_download, local_files_only) + + # From now on, etag, commit_hash, url and size are not None. + assert etag is not None, "etag must have been retrieved from server" + assert commit_hash is not None, "commit_hash must have been retrieved from server" + assert url_to_download is not None, "file location must have been retrieved from server" + assert expected_size is not None, "expected_size must have been retrieved from server" + blob_path = os.path.join(storage_folder, "blobs", etag) + pointer_path = _get_pointer_path(storage_folder, commit_hash, relative_filename) + + os.makedirs(os.path.dirname(blob_path), exist_ok=True) + os.makedirs(os.path.dirname(pointer_path), exist_ok=True) + + # if passed revision is not identical to commit_hash + # then revision has to be a branch name or tag name. + # In that case store a ref. + _cache_commit_hash_for_specific_revision(storage_folder, revision, commit_hash) + + # Prevent parallel downloads of the same file with a lock. + # etag could be duplicated across repos, + lock_path = os.path.join(locks_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type), f"{etag}.lock") + + # Some Windows versions do not allow for paths longer than 255 characters. + # In this case, we must specify it as an extended path by using the "\\?\" prefix. 
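+    # (For example, "C:\long\path\file.lock" would become "\\?\C:\long\path\file.lock".)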
+ if ( + os.name == "nt" + and len(os.path.abspath(lock_path)) > 255 + and not os.path.abspath(lock_path).startswith("\\\\?\\") + ): + lock_path = "\\\\?\\" + os.path.abspath(lock_path) + + if ( + os.name == "nt" + and len(os.path.abspath(blob_path)) > 255 + and not os.path.abspath(blob_path).startswith("\\\\?\\") + ): + blob_path = "\\\\?\\" + os.path.abspath(blob_path) + + Path(lock_path).parent.mkdir(parents=True, exist_ok=True) + + # pointer already exists -> immediate return + if not force_download and os.path.exists(pointer_path): + return pointer_path + + # Blob exists but pointer must be (safely) created -> take the lock + if not force_download and os.path.exists(blob_path): + with WeakFileLock(lock_path): + if not os.path.exists(pointer_path): + _create_symlink(blob_path, pointer_path, new_blob=False) + return pointer_path + + # Local file doesn't exist or etag isn't a match => retrieve file from remote (or cache) + + with WeakFileLock(lock_path): + _download_to_tmp_and_move( + incomplete_path=Path(blob_path + ".incomplete"), + destination_path=Path(blob_path), + url_to_download=url_to_download, + proxies=proxies, + headers=headers, + expected_size=expected_size, + filename=filename, + force_download=force_download, + etag=etag, + xet_file_data=xet_file_data, + ) + if not os.path.exists(pointer_path): + _create_symlink(blob_path, pointer_path, new_blob=True) + + return pointer_path + + +def _hf_hub_download_to_local_dir( + *, + # Destination + local_dir: Union[str, Path], + # File info + repo_id: str, + repo_type: str, + filename: str, + revision: str, + # HTTP info + endpoint: Optional[str], + etag_timeout: float, + headers: Dict[str, str], + proxies: Optional[Dict], + token: Union[bool, str, None], + # Additional options + cache_dir: str, + force_download: bool, + local_files_only: bool, +) -> str: + """Download a given file to a local folder, if not already present. + + Method should not be called directly. Please use `hf_hub_download` instead. + """ + # Some Windows versions do not allow for paths longer than 255 characters. + # In this case, we must specify it as an extended path by using the "\\?\" prefix. + if os.name == "nt" and len(os.path.abspath(local_dir)) > 255: + local_dir = "\\\\?\\" + os.path.abspath(local_dir) + local_dir = Path(local_dir) + paths = get_local_download_paths(local_dir=local_dir, filename=filename) + local_metadata = read_download_metadata(local_dir=local_dir, filename=filename) + + # Local file exists + metadata exists + commit_hash matches => return file + if ( + not force_download + and REGEX_COMMIT_HASH.match(revision) + and paths.file_path.is_file() + and local_metadata is not None + and local_metadata.commit_hash == revision + ): + return str(paths.file_path) + + # Local file doesn't exist or commit_hash doesn't match => we need the etag + (url_to_download, etag, commit_hash, expected_size, xet_file_data, head_call_error) = _get_metadata_or_catch_error( + repo_id=repo_id, + filename=filename, + repo_type=repo_type, + revision=revision, + endpoint=endpoint, + proxies=proxies, + etag_timeout=etag_timeout, + headers=headers, + token=token, + local_files_only=local_files_only, + ) + + if head_call_error is not None: + # No HEAD call but local file exists => default to local file + if not force_download and paths.file_path.is_file(): + logger.warning( + f"Couldn't access the Hub to check for update but local file already exists. Defaulting to existing file. 
(error: {head_call_error})"
+            )
+            return str(paths.file_path)
+        # Otherwise => raise
+        _raise_on_head_call_error(head_call_error, force_download, local_files_only)
+
+    # From now on, etag, commit_hash, url and size are not None.
+    assert etag is not None, "etag must have been retrieved from server"
+    assert commit_hash is not None, "commit_hash must have been retrieved from server"
+    assert url_to_download is not None, "file location must have been retrieved from server"
+    assert expected_size is not None, "expected_size must have been retrieved from server"
+
+    # Local file exists => check if it's up-to-date
+    if not force_download and paths.file_path.is_file():
+        # etag matches => update metadata and return file
+        if local_metadata is not None and local_metadata.etag == etag:
+            write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
+            return str(paths.file_path)
+
+        # metadata is outdated + etag is a sha256
+        # => means it's an LFS file (large)
+        # => let's compute local hash and compare
+        # => if match, update metadata and return file
+        if local_metadata is None and REGEX_SHA256.match(etag) is not None:
+            with open(paths.file_path, "rb") as f:
+                file_hash = sha_fileobj(f).hex()
+            if file_hash == etag:
+                write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
+                return str(paths.file_path)
+
+    # Local file doesn't exist or etag isn't a match => retrieve file from remote (or cache)
+
+    # If we are lucky enough, the file is already in the cache => copy it
+    if not force_download:
+        cached_path = try_to_load_from_cache(
+            repo_id=repo_id,
+            filename=filename,
+            cache_dir=cache_dir,
+            revision=commit_hash,
+            repo_type=repo_type,
+        )
+        if isinstance(cached_path, str):
+            with WeakFileLock(paths.lock_path):
+                paths.file_path.parent.mkdir(parents=True, exist_ok=True)
+                shutil.copyfile(cached_path, paths.file_path)
+            write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
+            return str(paths.file_path)
+
+    # Otherwise, let's download the file!
+    with WeakFileLock(paths.lock_path):
+        paths.file_path.unlink(missing_ok=True)  # delete outdated file first
+        _download_to_tmp_and_move(
+            incomplete_path=paths.incomplete_path(etag),
+            destination_path=paths.file_path,
+            url_to_download=url_to_download,
+            proxies=proxies,
+            headers=headers,
+            expected_size=expected_size,
+            filename=filename,
+            force_download=force_download,
+            etag=etag,
+            xet_file_data=xet_file_data,
+        )
+
+    write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
+    return str(paths.file_path)
+
+
+@validate_hf_hub_args
+def try_to_load_from_cache(
+    repo_id: str,
+    filename: str,
+    cache_dir: Union[str, Path, None] = None,
+    revision: Optional[str] = None,
+    repo_type: Optional[str] = None,
+) -> Union[str, _CACHED_NO_EXIST_T, None]:
+    """
+    Explores the cache to return the latest cached file for a given revision if found.
+
+    This function will not raise any exception if the file is not cached.
+
+    Args:
+        cache_dir (`str` or `os.PathLike`):
+            The folder where the cached files lie.
+        repo_id (`str`):
+            The ID of the repo on huggingface.co.
+        filename (`str`):
+            The filename to look for inside `repo_id`.
+        revision (`str`, *optional*):
+            The specific model version to use. Will default to `"main"` if it's not provided and no `commit_hash` is
+            provided either.
+        repo_type (`str`, *optional*):
+            The type of the repository. Will default to `"model"`.
+
+    Returns:
+        `Optional[str]` or `_CACHED_NO_EXIST`:
+            Will return `None` if the file was not cached. Otherwise:
+            - The exact path to the cached file if it's found in the cache
+            - A special value `_CACHED_NO_EXIST` if the file does not exist at the given commit hash and this fact was
+              cached.
+
+    Example:
+
+    ```python
+    from huggingface_hub import try_to_load_from_cache, _CACHED_NO_EXIST
+
+    filepath = try_to_load_from_cache(repo_id="gpt2", filename="config.json")
+    if isinstance(filepath, str):
+        # file exists and is cached
+        ...
+    elif filepath is _CACHED_NO_EXIST:
+        # non-existence of file is cached
+        ...
+    else:
+        # file is not cached
+        ...
+    ```
+    """
+    if revision is None:
+        revision = "main"
+    if repo_type is None:
+        repo_type = "model"
+    if repo_type not in constants.REPO_TYPES:
+        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are: {str(constants.REPO_TYPES)}")
+    if cache_dir is None:
+        cache_dir = constants.HF_HUB_CACHE
+
+    object_id = repo_id.replace("/", "--")
+    repo_cache = os.path.join(cache_dir, f"{repo_type}s--{object_id}")
+    if not os.path.isdir(repo_cache):
+        # No cache for this repo
+        return None
+
+    refs_dir = os.path.join(repo_cache, "refs")
+    snapshots_dir = os.path.join(repo_cache, "snapshots")
+    no_exist_dir = os.path.join(repo_cache, ".no_exist")
+
+    # Resolve refs (for instance to convert main to the associated commit sha)
+    if os.path.isdir(refs_dir):
+        revision_file = os.path.join(refs_dir, revision)
+        if os.path.isfile(revision_file):
+            with open(revision_file) as f:
+                revision = f.read()
+
+    # Check if file is cached as "no_exist"
+    if os.path.isfile(os.path.join(no_exist_dir, revision, filename)):
+        return _CACHED_NO_EXIST
+
+    # Check if revision folder exists
+    if not os.path.exists(snapshots_dir):
+        return None
+    cached_shas = os.listdir(snapshots_dir)
+    if revision not in cached_shas:
+        # No cache for this revision and we won't try to return a random revision
+        return None
+
+    # Check if file exists in cache
+    cached_file = os.path.join(snapshots_dir, revision, filename)
+    return cached_file if os.path.isfile(cached_file) else None
+
+
+@validate_hf_hub_args
+def get_hf_file_metadata(
+    url: str,
+    token: Union[bool, str, None] = None,
+    proxies: Optional[Dict] = None,
+    timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT,
+    library_name: Optional[str] = None,
+    library_version: Optional[str] = None,
+    user_agent: Union[Dict, str, None] = None,
+    headers: Optional[Dict[str, str]] = None,
+    endpoint: Optional[str] = None,
+) -> HfFileMetadata:
+    """Fetch metadata of a file versioned on the Hub for a given url.
+
+    Args:
+        url (`str`):
+            File url, for example returned by [`hf_hub_url`].
+        token (`str` or `bool`, *optional*):
+            A token to be used for the download.
+                - If `True`, the token is read from the HuggingFace config
+                  folder.
+                - If `False` or `None`, no token is provided.
+                - If a string, it's used as the authentication token.
+        proxies (`dict`, *optional*):
+            Dictionary mapping protocol to the URL of the proxy passed to
+            `requests.request`.
+        timeout (`float`, *optional*, defaults to 10):
+            How many seconds to wait for the server to send metadata before giving up.
+        library_name (`str`, *optional*):
+            The name of the library to which the object corresponds.
+        library_version (`str`, *optional*):
+            The version of the library.
+        user_agent (`dict`, `str`, *optional*):
+            The user-agent info in the form of a dictionary or a string.
+        headers (`dict`, *optional*):
+            Additional headers to be sent with the request.
+        endpoint (`str`, *optional*):
+            Endpoint of the Hub. Defaults to `https://huggingface.co`.
+
+    Returns:
+        A [`HfFileMetadata`] object containing metadata such as location, etag, size and
+        commit_hash.
+    """
+    hf_headers = build_hf_headers(
+        token=token,
+        library_name=library_name,
+        library_version=library_version,
+        user_agent=user_agent,
+        headers=headers,
+    )
+    hf_headers["Accept-Encoding"] = "identity"  # prevent any compression => we want to know the real size of the file
+
+    # Retrieve metadata
+    r = _request_wrapper(
+        method="HEAD",
+        url=url,
+        headers=hf_headers,
+        allow_redirects=False,
+        follow_relative_redirects=True,
+        proxies=proxies,
+        timeout=timeout,
+    )
+    hf_raise_for_status(r)
+
+    # Return
+    return HfFileMetadata(
+        commit_hash=r.headers.get(constants.HUGGINGFACE_HEADER_X_REPO_COMMIT),
+        # We favor a custom header indicating the etag of the linked resource, and
+        # we fall back to the regular etag header.
+        etag=_normalize_etag(r.headers.get(constants.HUGGINGFACE_HEADER_X_LINKED_ETAG) or r.headers.get("ETag")),
+        # Either from response headers (if redirected) or defaults to request url.
+        # Do not use `url` directly, as `_request_wrapper` might have followed relative
+        # redirects.
+        location=r.headers.get("Location") or r.request.url,  # type: ignore
+        size=_int_or_none(
+            r.headers.get(constants.HUGGINGFACE_HEADER_X_LINKED_SIZE) or r.headers.get("Content-Length")
+        ),
+        xet_file_data=parse_xet_file_data_from_response(r, endpoint=endpoint),  # type: ignore
+    )
+
+
+def _get_metadata_or_catch_error(
+    *,
+    repo_id: str,
+    filename: str,
+    repo_type: str,
+    revision: str,
+    endpoint: Optional[str],
+    proxies: Optional[Dict],
+    etag_timeout: Optional[float],
+    headers: Dict[str, str],  # mutated inplace!
+    token: Union[bool, str, None],
+    local_files_only: bool,
+    relative_filename: Optional[str] = None,  # only used to store `.no_exists` in cache
+    storage_folder: Optional[str] = None,  # only used to store `.no_exists` in cache
+) -> Union[
+    # Either an exception is caught and returned
+    Tuple[None, None, None, None, None, Exception],
+    # Or the metadata is returned as
+    # `(url_to_download, etag, commit_hash, expected_size, xet_file_data, None)`
+    Tuple[str, str, str, int, Optional[XetFileData], None],
+]:
+    """Get metadata for a file on the Hub, safely handling network issues.
+
+    Returns either the etag, commit_hash and expected size of the file, or the error
+    raised while fetching the metadata.
+
+    NOTE: This function mutates `headers` inplace! It removes the `authorization` header
+    if the file is an LFS blob and the domain of the url is different from the
+    domain of the location (typically an S3 bucket).
+    """
+    if local_files_only:
+        return (
+            None,
+            None,
+            None,
+            None,
+            None,
+            OfflineModeIsEnabled(
+                f"Cannot access file since 'local_files_only=True' has been set. (repo_id: {repo_id}, repo_type: {repo_type}, revision: {revision}, filename: {filename})"
+            ),
+        )
+
+    url = hf_hub_url(repo_id, filename, repo_type=repo_type, revision=revision, endpoint=endpoint)
+    url_to_download: str = url
+    etag: Optional[str] = None
+    commit_hash: Optional[str] = None
+    expected_size: Optional[int] = None
+    head_error_call: Optional[Exception] = None
+    xet_file_data: Optional[XetFileData] = None
+
+    # Try to get metadata from the server.
+    # Do not raise yet if the file is not found or not accessible.
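+    # (Illustration: with repo_id="gpt2", filename="config.json" and revision="main",
+    # `url` is "https://huggingface.co/gpt2/resolve/main/config.json".)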
+ if not local_files_only: + try: + try: + metadata = get_hf_file_metadata( + url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token, endpoint=endpoint + ) + except EntryNotFoundError as http_error: + if storage_folder is not None and relative_filename is not None: + # Cache the non-existence of the file + commit_hash = http_error.response.headers.get(constants.HUGGINGFACE_HEADER_X_REPO_COMMIT) + if commit_hash is not None: + no_exist_file_path = Path(storage_folder) / ".no_exist" / commit_hash / relative_filename + try: + no_exist_file_path.parent.mkdir(parents=True, exist_ok=True) + no_exist_file_path.touch() + except OSError as e: + logger.error( + f"Could not cache non-existence of file. Will ignore error and continue. Error: {e}" + ) + _cache_commit_hash_for_specific_revision(storage_folder, revision, commit_hash) + raise + + # Commit hash must exist + commit_hash = metadata.commit_hash + if commit_hash is None: + raise FileMetadataError( + "Distant resource does not seem to be on huggingface.co. It is possible that a configuration issue" + " prevents you from downloading resources from https://huggingface.co. Please check your firewall" + " and proxy settings and make sure your SSL certificates are updated." + ) + + # Etag must exist + # If we don't have any of those, raise an error. + etag = metadata.etag + if etag is None: + raise FileMetadataError( + "Distant resource does not have an ETag, we won't be able to reliably ensure reproducibility." + ) + + # Size must exist + expected_size = metadata.size + if expected_size is None: + raise FileMetadataError("Distant resource does not have a Content-Length.") + + xet_file_data = metadata.xet_file_data + + # In case of a redirect, save an extra redirect on the request.get call, + # and ensure we download the exact atomic version even if it changed + # between the HEAD and the GET (unlikely, but hey). + # + # If url domain is different => we are downloading from a CDN => url is signed => don't send auth + # If url domain is the same => redirect due to repo rename AND downloading a regular file => keep auth + if xet_file_data is None and url != metadata.location: + url_to_download = metadata.location + if urlparse(url).netloc != urlparse(metadata.location).netloc: + # Remove authorization header when downloading a LFS blob + headers.pop("authorization", None) + except (requests.exceptions.SSLError, requests.exceptions.ProxyError): + # Actually raise for those subclasses of ConnectionError + raise + except ( + requests.exceptions.ConnectionError, + requests.exceptions.Timeout, + OfflineModeIsEnabled, + ) as error: + # Otherwise, our Internet connection is down. + # etag is None + head_error_call = error + except (RevisionNotFoundError, EntryNotFoundError): + # The repo was found but the revision or entry doesn't exist on the Hub (never existed or got deleted) + raise + except requests.HTTPError as error: + # Multiple reasons for an http error: + # - Repository is private and invalid/missing token sent + # - Repository is gated and invalid/missing token sent + # - Hub is down (error 500 or 504) + # => let's switch to 'local_files_only=True' to check if the files are already cached. 
+            # (if it's not the case, the error will be re-raised)
+            head_error_call = error
+        except FileMetadataError as error:
+            # Multiple reasons for a FileMetadataError:
+            # - Wrong network configuration (proxy, firewall, SSL certificates)
+            # - Inconsistency on the Hub
+            # => let's switch to 'local_files_only=True' to check if the files are already cached.
+            # (if it's not the case, the error will be re-raised)
+            head_error_call = error
+
+    if not (local_files_only or etag is not None or head_error_call is not None):
+        raise RuntimeError("etag is empty due to an unhandled error case")
+
+    return (url_to_download, etag, commit_hash, expected_size, xet_file_data, head_error_call)  # type: ignore [return-value]
+
+
+def _raise_on_head_call_error(head_call_error: Exception, force_download: bool, local_files_only: bool) -> NoReturn:
+    """Raise an appropriate error when the HEAD call failed and we cannot locate a local file."""
+    # No head call => we cannot force download.
+    if force_download:
+        if local_files_only:
+            raise ValueError("Cannot pass 'force_download=True' and 'local_files_only=True' at the same time.")
+        elif isinstance(head_call_error, OfflineModeIsEnabled):
+            raise ValueError("Cannot pass 'force_download=True' when offline mode is enabled.") from head_call_error
+        else:
+            raise ValueError("Force download failed due to the above error.") from head_call_error
+
+    # No head call + couldn't find an appropriate file on disk => raise an error.
+    if local_files_only:
+        raise LocalEntryNotFoundError(
+            "Cannot find the requested files in the disk cache and outgoing traffic has been disabled. To enable"
+            " hf.co look-ups and downloads online, set 'local_files_only' to False."
+        )
+    elif isinstance(head_call_error, (RepositoryNotFoundError, GatedRepoError)) or (
+        isinstance(head_call_error, HfHubHTTPError) and head_call_error.response.status_code == 401
+    ):
+        # Repo not found or gated => let's raise the actual error
+        # Unauthorized => likely a token issue => let's raise the actual error
+        raise head_call_error
+    else:
+        # Otherwise: most likely a connection issue or Hub downtime => let's warn the user
+        raise LocalEntryNotFoundError(
+            "An error happened while trying to locate the file on the Hub and we cannot find the requested files"
+            " in the local cache. Please check your Internet connection and try again."
+        ) from head_call_error
+
+
+def _download_to_tmp_and_move(
+    incomplete_path: Path,
+    destination_path: Path,
+    url_to_download: str,
+    proxies: Optional[Dict],
+    headers: Dict[str, str],
+    expected_size: Optional[int],
+    filename: str,
+    force_download: bool,
+    etag: Optional[str],
+    xet_file_data: Optional[XetFileData],
+) -> None:
+    """Download content from a URL to a destination path.
+
+    Internal logic:
+    - return early if file is already downloaded
+    - resume download if possible (from incomplete file)
+    - do not resume download if `force_download=True` or `HF_HUB_ENABLE_HF_TRANSFER=True`
+    - check disk space before downloading
+    - download content to a temporary file
+    - set correct permissions on temporary file
+    - move the temporary file to the destination path
+
+    Both `incomplete_path` and `destination_path` must be on the same volume to avoid a local copy.
+ """ + if destination_path.exists() and not force_download: + # Do nothing if already exists (except if force_download=True) + return + + if incomplete_path.exists() and (force_download or (constants.HF_HUB_ENABLE_HF_TRANSFER and not proxies)): + # By default, we will try to resume the download if possible. + # However, if the user has set `force_download=True` or if `hf_transfer` is enabled, then we should + # not resume the download => delete the incomplete file. + message = f"Removing incomplete file '{incomplete_path}'" + if force_download: + message += " (force_download=True)" + elif constants.HF_HUB_ENABLE_HF_TRANSFER and not proxies: + message += " (hf_transfer=True)" + logger.info(message) + incomplete_path.unlink(missing_ok=True) + + with incomplete_path.open("ab") as f: + resume_size = f.tell() + message = f"Downloading '{filename}' to '{incomplete_path}'" + if resume_size > 0 and expected_size is not None: + message += f" (resume from {resume_size}/{expected_size})" + logger.info(message) + + if expected_size is not None: # might be None if HTTP header not set correctly + # Check disk space in both tmp and destination path + _check_disk_space(expected_size, incomplete_path.parent) + _check_disk_space(expected_size, destination_path.parent) + + if xet_file_data is not None and is_xet_available(): + logger.debug("Xet Storage is enabled for this repo. Downloading file from Xet Storage..") + xet_get( + incomplete_path=incomplete_path, + xet_file_data=xet_file_data, + headers=headers, + expected_size=expected_size, + displayed_filename=filename, + ) + else: + if xet_file_data is not None and not constants.HF_HUB_DISABLE_XET: + logger.warning( + "Xet Storage is enabled for this repo, but the 'hf_xet' package is not installed. " + "Falling back to regular HTTP download. " + "For better performance, install the package with: `pip install huggingface_hub[hf_xet]` or `pip install hf_xet`" + ) + + http_get( + url_to_download, + f, + proxies=proxies, + resume_size=resume_size, + headers=headers, + expected_size=expected_size, + ) + + logger.info(f"Download complete. Moving file to {destination_path}") + _chmod_and_move(incomplete_path, destination_path) + + +def _int_or_none(value: Optional[str]) -> Optional[int]: + try: + return int(value) # type: ignore + except (TypeError, ValueError): + return None + + +def _chmod_and_move(src: Path, dst: Path) -> None: + """Set correct permission before moving a blob from tmp directory to cache dir. + + Do not take into account the `umask` from the process as there is no convenient way + to get it that is thread-safe. + + See: + - About umask: https://docs.python.org/3/library/os.html#os.umask + - Thread-safety: https://stackoverflow.com/a/70343066 + - About solution: https://github.com/huggingface/huggingface_hub/pull/1220#issuecomment-1326211591 + - Fix issue: https://github.com/huggingface/huggingface_hub/issues/1141 + - Fix issue: https://github.com/huggingface/huggingface_hub/issues/1215 + """ + # Get umask by creating a temporary file in the cached repo folder. + tmp_file = dst.parent.parent / f"tmp_{uuid.uuid4()}" + try: + tmp_file.touch() + cache_dir_mode = Path(tmp_file).stat().st_mode + os.chmod(str(src), stat.S_IMODE(cache_dir_mode)) + except OSError as e: + logger.warning( + f"Could not set the permissions on the file '{src}'. Error: {e}.\nContinuing without setting permissions." 
+ ) + finally: + try: + tmp_file.unlink() + except OSError: + # fails if `tmp_file.touch()` failed => do nothing + # See https://github.com/huggingface/huggingface_hub/issues/2359 + pass + + shutil.move(str(src), str(dst), copy_function=_copy_no_matter_what) + + +def _copy_no_matter_what(src: str, dst: str) -> None: + """Copy file from src to dst. + + If `shutil.copy2` fails, fallback to `shutil.copyfile`. + """ + try: + # Copy file with metadata and permission + # Can fail e.g. if dst is an S3 mount + shutil.copy2(src, dst) + except OSError: + # Copy only file content + shutil.copyfile(src, dst) + + +def _get_pointer_path(storage_folder: str, revision: str, relative_filename: str) -> str: + # Using `os.path.abspath` instead of `Path.resolve()` to avoid resolving symlinks + snapshot_path = os.path.join(storage_folder, "snapshots") + pointer_path = os.path.join(snapshot_path, revision, relative_filename) + if Path(os.path.abspath(snapshot_path)) not in Path(os.path.abspath(pointer_path)).parents: + raise ValueError( + "Invalid pointer path: cannot create pointer path in snapshot folder if" + f" `storage_folder='{storage_folder}'`, `revision='{revision}'` and" + f" `relative_filename='{relative_filename}'`." + ) + return pointer_path diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/hf_api.py b/venv/lib/python3.13/site-packages/huggingface_hub/hf_api.py new file mode 100644 index 0000000000000000000000000000000000000000..8977e202da9631530dee3009427eae6ad26ec17f --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/hf_api.py @@ -0,0 +1,11036 @@ +# coding=utf-8 +# Copyright 2019-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import inspect +import json +import re +import struct +import time +import warnings +from collections import defaultdict +from concurrent.futures import Future, ThreadPoolExecutor +from dataclasses import asdict, dataclass, field +from datetime import datetime +from functools import wraps +from itertools import islice +from pathlib import Path +from textwrap import dedent +from typing import ( + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Iterable, + Iterator, + List, + Literal, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) +from urllib.parse import quote + +import requests +from requests.exceptions import HTTPError +from tqdm.auto import tqdm as base_tqdm +from tqdm.contrib.concurrent import thread_map + +from . 
import constants +from ._commit_api import ( + CommitOperation, + CommitOperationAdd, + CommitOperationCopy, + CommitOperationDelete, + _fetch_files_to_copy, + _fetch_upload_modes, + _prepare_commit_payload, + _upload_files, + _warn_on_overwriting_operations, +) +from ._inference_endpoints import InferenceEndpoint, InferenceEndpointType +from ._jobs_api import JobInfo, JobSpec, ScheduledJobInfo, _create_job_spec +from ._space_api import SpaceHardware, SpaceRuntime, SpaceStorage, SpaceVariable +from ._upload_large_folder import upload_large_folder_internal +from .community import ( + Discussion, + DiscussionComment, + DiscussionStatusChange, + DiscussionTitleChange, + DiscussionWithDetails, + deserialize_event, +) +from .constants import ( + DEFAULT_ETAG_TIMEOUT, # noqa: F401 # kept for backward compatibility + DEFAULT_REQUEST_TIMEOUT, # noqa: F401 # kept for backward compatibility + DEFAULT_REVISION, # noqa: F401 # kept for backward compatibility + DISCUSSION_STATUS, # noqa: F401 # kept for backward compatibility + DISCUSSION_TYPES, # noqa: F401 # kept for backward compatibility + ENDPOINT, # noqa: F401 # kept for backward compatibility + INFERENCE_ENDPOINTS_ENDPOINT, # noqa: F401 # kept for backward compatibility + REGEX_COMMIT_OID, # noqa: F401 # kept for backward compatibility + REPO_TYPE_MODEL, # noqa: F401 # kept for backward compatibility + REPO_TYPES, # noqa: F401 # kept for backward compatibility + REPO_TYPES_MAPPING, # noqa: F401 # kept for backward compatibility + REPO_TYPES_URL_PREFIXES, # noqa: F401 # kept for backward compatibility + SAFETENSORS_INDEX_FILE, # noqa: F401 # kept for backward compatibility + SAFETENSORS_MAX_HEADER_LENGTH, # noqa: F401 # kept for backward compatibility + SAFETENSORS_SINGLE_FILE, # noqa: F401 # kept for backward compatibility + SPACES_SDK_TYPES, # noqa: F401 # kept for backward compatibility + WEBHOOK_DOMAIN_T, # noqa: F401 # kept for backward compatibility + DiscussionStatusFilter, # noqa: F401 # kept for backward compatibility + DiscussionTypeFilter, # noqa: F401 # kept for backward compatibility +) +from .errors import ( + BadRequestError, + EntryNotFoundError, + GatedRepoError, + HfHubHTTPError, + RepositoryNotFoundError, + RevisionNotFoundError, +) +from .file_download import HfFileMetadata, get_hf_file_metadata, hf_hub_url +from .repocard_data import DatasetCardData, ModelCardData, SpaceCardData +from .utils import ( + DEFAULT_IGNORE_PATTERNS, + HfFolder, # noqa: F401 # kept for backward compatibility + LocalTokenNotFoundError, + NotASafetensorsRepoError, + SafetensorsFileMetadata, + SafetensorsParsingError, + SafetensorsRepoMetadata, + TensorInfo, + build_hf_headers, + chunk_iterable, + experimental, + filter_repo_objects, + fix_hf_endpoint_in_url, + get_session, + get_token, + hf_raise_for_status, + logging, + paginate, + parse_datetime, + validate_hf_hub_args, +) +from .utils import tqdm as hf_tqdm +from .utils._auth import _get_token_from_environment, _get_token_from_file, _get_token_from_google_colab +from .utils._deprecation import _deprecate_arguments, _deprecate_method +from .utils._typing import CallableT +from .utils.endpoint_helpers import _is_emission_within_threshold + + +if TYPE_CHECKING: + from .inference._providers import PROVIDER_T + +R = TypeVar("R") # Return type +CollectionItemType_T = Literal["model", "dataset", "space", "paper", "collection"] + +ExpandModelProperty_T = Literal[ + "author", + "baseModels", + "cardData", + "childrenModelCount", + "config", + "createdAt", + "disabled", + "downloads", + "downloadsAllTime", + 
"gated", + "gguf", + "inference", + "inferenceProviderMapping", + "lastModified", + "library_name", + "likes", + "mask_token", + "model-index", + "pipeline_tag", + "private", + "resourceGroup", + "safetensors", + "sha", + "siblings", + "spaces", + "tags", + "transformersInfo", + "trendingScore", + "usedStorage", + "widgetData", + "xetEnabled", +] + +ExpandDatasetProperty_T = Literal[ + "author", + "cardData", + "citation", + "createdAt", + "description", + "disabled", + "downloads", + "downloadsAllTime", + "gated", + "lastModified", + "likes", + "paperswithcode_id", + "private", + "resourceGroup", + "sha", + "siblings", + "tags", + "trendingScore", + "usedStorage", + "xetEnabled", +] + +ExpandSpaceProperty_T = Literal[ + "author", + "cardData", + "createdAt", + "datasets", + "disabled", + "lastModified", + "likes", + "models", + "private", + "resourceGroup", + "runtime", + "sdk", + "sha", + "siblings", + "subdomain", + "tags", + "trendingScore", + "usedStorage", + "xetEnabled", +] + +USERNAME_PLACEHOLDER = "hf_user" +_REGEX_DISCUSSION_URL = re.compile(r".*/discussions/(\d+)$") + +_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE = ( + "\nNote: Creating a commit assumes that the repo already exists on the" + " Huggingface Hub. Please use `create_repo` if it's not the case." +) +_AUTH_CHECK_NO_REPO_ERROR_MESSAGE = ( + "\nNote: The repository either does not exist or you do not have access rights." + " Please check the repository ID and your access permissions." + " If this is a private repository, ensure that your token is correct." +) +logger = logging.get_logger(__name__) + + +def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None) -> Tuple[Optional[str], Optional[str], str]: + """ + Returns the repo type and ID from a huggingface.co URL linking to a + repository + + Args: + hf_id (`str`): + An URL or ID of a repository on the HF hub. Accepted values are: + + - https://huggingface.co/// + - https://huggingface.co// + - hf://// + - hf:/// + - // + - / + - + hub_url (`str`, *optional*): + The URL of the HuggingFace Hub, defaults to https://huggingface.co + + Returns: + A tuple with three items: repo_type (`str` or `None`), namespace (`str` or + `None`) and repo_id (`str`). + + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If URL cannot be parsed. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If `repo_type` is unknown. 
+
+    """
+    input_hf_id = hf_id
+
+    hub_url = re.sub(r"https?://", "", hub_url if hub_url is not None else constants.ENDPOINT)
+    is_hf_url = hub_url in hf_id and "@" not in hf_id
+
+    HFFS_PREFIX = "hf://"
+    if hf_id.startswith(HFFS_PREFIX):  # Remove "hf://" prefix if present
+        hf_id = hf_id[len(HFFS_PREFIX) :]
+
+    url_segments = hf_id.split("/")
+    is_hf_id = len(url_segments) <= 3
+
+    namespace: Optional[str]
+    if is_hf_url:
+        namespace, repo_id = url_segments[-2:]
+        if namespace == hub_url:
+            namespace = None
+        if len(url_segments) > 2 and hub_url not in url_segments[-3]:
+            repo_type = url_segments[-3]
+        elif namespace in constants.REPO_TYPES_MAPPING:
+            # Means a canonical dataset or model
+            repo_type = constants.REPO_TYPES_MAPPING[namespace]
+            namespace = None
+        else:
+            repo_type = None
+    elif is_hf_id:
+        if len(url_segments) == 3:
+            # Passed <repo_type>/<user>/<model_id> or <repo_type>/<org>/<model_id>
+            repo_type, namespace, repo_id = url_segments[-3:]
+        elif len(url_segments) == 2:
+            if url_segments[0] in constants.REPO_TYPES_MAPPING:
+                # Passed '<model_id>' or 'datasets/<dataset_id>' for a canonical model or dataset
+                repo_type = constants.REPO_TYPES_MAPPING[url_segments[0]]
+                namespace = None
+                repo_id = hf_id.split("/")[-1]
+            else:
+                # Passed <user>/<model_id> or <org>/<model_id>
+                namespace, repo_id = hf_id.split("/")[-2:]
+                repo_type = None
+        else:
+            # Passed <model_id>
+            repo_id = url_segments[0]
+            namespace, repo_type = None, None
+    else:
+        raise ValueError(f"Unable to retrieve user and repo ID from the passed HF ID: {hf_id}")
+
+    # Check if repo type is known (mapping "spaces" => "space" + empty value => `None`)
+    if repo_type in constants.REPO_TYPES_MAPPING:
+        repo_type = constants.REPO_TYPES_MAPPING[repo_type]
+    if repo_type == "":
+        repo_type = None
+    if repo_type not in constants.REPO_TYPES:
+        raise ValueError(f"Unknown `repo_type`: '{repo_type}' ('{input_hf_id}')")
+
+    return repo_type, namespace, repo_id
+
+
+@dataclass
+class LastCommitInfo(dict):
+    oid: str
+    title: str
+    date: datetime
+
+    def __post_init__(self):  # hack to make LastCommitInfo backward compatible
+        self.update(asdict(self))
+
+
+@dataclass
+class BlobLfsInfo(dict):
+    size: int
+    sha256: str
+    pointer_size: int
+
+    def __post_init__(self):  # hack to make BlobLfsInfo backward compatible
+        self.update(asdict(self))
+
+
+@dataclass
+class BlobSecurityInfo(dict):
+    safe: bool  # duplicate information with "status" field, keeping it for backward compatibility
+    status: str
+    av_scan: Optional[Dict]
+    pickle_import_scan: Optional[Dict]
+
+    def __post_init__(self):  # hack to make BlobSecurityInfo backward compatible
+        self.update(asdict(self))
+
+
+@dataclass
+class TransformersInfo(dict):
+    auto_model: str
+    custom_class: Optional[str] = None
+    # possible `pipeline_tag` values: https://github.com/huggingface/huggingface.js/blob/3ee32554b8620644a6287e786b2a83bf5caf559c/packages/tasks/src/pipelines.ts#L72
+    pipeline_tag: Optional[str] = None
+    processor: Optional[str] = None
+
+    def __post_init__(self):  # hack to make TransformersInfo backward compatible
+        self.update(asdict(self))
+
+
+@dataclass
+class SafeTensorsInfo(dict):
+    parameters: Dict[str, int]
+    total: int
+
+    def __post_init__(self):  # hack to make SafeTensorsInfo backward compatible
+        self.update(asdict(self))
+
+
+@dataclass
+class CommitInfo(str):
+    """Data structure containing information about a newly created commit.
+
+    Returned by any method that creates a commit on the Hub: [`create_commit`], [`upload_file`], [`upload_folder`],
+    [`delete_file`], [`delete_folder`].
It inherits from `str` for backward compatibility but using methods specific
+    to `str` is deprecated.
+
+    Attributes:
+        commit_url (`str`):
+            URL where to find the commit.
+
+        commit_message (`str`):
+            The summary (first line) of the commit that has been created.
+
+        commit_description (`str`):
+            Description of the commit that has been created. Can be empty.
+
+        oid (`str`):
+            Commit hash id. Example: `"91c54ad1727ee830252e457677f467be0bfd8a57"`.
+
+        pr_url (`str`, *optional*):
+            URL to the PR that has been created, if any. Populated when `create_pr=True`
+            is passed.
+
+        pr_revision (`str`, *optional*):
+            Revision of the PR that has been created, if any. Populated when
+            `create_pr=True` is passed. Example: `"refs/pr/1"`.
+
+        pr_num (`int`, *optional*):
+            Number of the PR discussion that has been created, if any. Populated when
+            `create_pr=True` is passed. Can be passed as `discussion_num` in
+            [`get_discussion_details`]. Example: `1`.
+
+        repo_url (`RepoUrl`):
+            Repo URL of the commit containing info like repo_id, repo_type, etc.
+
+        _url (`str`, *optional*):
+            Legacy URL for `str` compatibility. Can be the URL to the uploaded file on the Hub (if returned by
+            [`upload_file`]), to the uploaded folder on the Hub (if returned by [`upload_folder`]) or to the commit on
+            the Hub (if returned by [`create_commit`]). Defaults to `commit_url`. It is deprecated to use this
+            attribute. Please use `commit_url` instead.
+    """
+
+    commit_url: str
+    commit_message: str
+    commit_description: str
+    oid: str
+    pr_url: Optional[str] = None
+
+    # Computed from `commit_url` in `__post_init__`
+    repo_url: RepoUrl = field(init=False)
+
+    # Computed from `pr_url` in `__post_init__`
+    pr_revision: Optional[str] = field(init=False)
+    pr_num: Optional[int] = field(init=False)
+
+    # legacy url for `str` compatibility (ex: url to uploaded file, url to uploaded folder, url to PR, etc.)
+    _url: str = field(repr=False, default=None)  # type: ignore  # defaults to `commit_url`
+
+    def __new__(cls, *args, commit_url: str, _url: Optional[str] = None, **kwargs):
+        return str.__new__(cls, _url or commit_url)
+
+    def __post_init__(self):
+        """Populate pr-related fields after initialization.
+
+        See https://docs.python.org/3.10/library/dataclasses.html#post-init-processing.
+        """
+        # Repo info
+        self.repo_url = RepoUrl(self.commit_url.split("/commit/")[0])
+
+        # PR info
+        if self.pr_url is not None:
+            self.pr_revision = _parse_revision_from_pr_url(self.pr_url)
+            self.pr_num = int(self.pr_revision.split("/")[-1])
+        else:
+            self.pr_revision = None
+            self.pr_num = None
+
+
+@dataclass
+class AccessRequest:
+    """Data structure containing information about a user access request.
+
+    Attributes:
+        username (`str`):
+            Username of the user who requested access.
+        fullname (`str`):
+            Fullname of the user who requested access.
+        email (`Optional[str]`):
+            Email of the user who requested access.
+            Can only be `None` in the /accepted list if the user was granted access manually.
+        timestamp (`datetime`):
+            Timestamp of the request.
+        status (`Literal["pending", "accepted", "rejected"]`):
+            Status of the request. Can be one of `["pending", "accepted", "rejected"]`.
+        fields (`Dict[str, Any]`, *optional*):
+            Additional fields filled by the user in the gate form.
+
+    """
+
+    username: str
+    fullname: str
+    email: Optional[str]
+    timestamp: datetime
+    status: Literal["pending", "accepted", "rejected"]
+
+    # Additional fields filled by the user in the gate form
+    fields: Optional[Dict[str, Any]] = None
+
+
+@dataclass
+class WebhookWatchedItem:
+    """Data structure containing information about the items watched by a webhook.
+
+    Attributes:
+        type (`Literal["dataset", "model", "org", "space", "user"]`):
+            Type of the item to be watched. Can be one of `["dataset", "model", "org", "space", "user"]`.
+        name (`str`):
+            Name of the item to be watched. Can be the username, organization name, model name, dataset name or space name.
+    """
+
+    type: Literal["dataset", "model", "org", "space", "user"]
+    name: str
+
+
+@dataclass
+class WebhookInfo:
+    """Data structure containing information about a webhook.
+
+    One of `url` or `job` is specified, but not both.
+
+    Attributes:
+        id (`str`):
+            ID of the webhook.
+        url (`str`, *optional*):
+            URL of the webhook.
+        job (`JobSpec`, *optional*):
+            Specifications of the Job to trigger.
+        watched (`List[WebhookWatchedItem]`):
+            List of items watched by the webhook, see [`WebhookWatchedItem`].
+        domains (`List[WEBHOOK_DOMAIN_T]`):
+            List of domains the webhook is watching. Can be one of `["repo", "discussions"]`.
+        secret (`str`, *optional*):
+            Secret of the webhook.
+        disabled (`bool`):
+            Whether the webhook is disabled or not.
+    """
+
+    id: str
+    url: Optional[str]
+    job: Optional[JobSpec]
+    watched: List[WebhookWatchedItem]
+    domains: List[constants.WEBHOOK_DOMAIN_T]
+    secret: Optional[str]
+    disabled: bool
+
+
+class RepoUrl(str):
+    """Subclass of `str` describing a repo URL on the Hub.
+
+    `RepoUrl` is returned by `HfApi.create_repo`. It inherits from `str` for backward
+    compatibility. At initialization, the URL is parsed to populate properties:
+    - endpoint (`str`)
+    - namespace (`Optional[str]`)
+    - repo_name (`str`)
+    - repo_id (`str`)
+    - repo_type (`Literal["model", "dataset", "space"]`)
+    - url (`str`)
+
+    Args:
+        url (`Any`):
+            String value of the repo url.
+        endpoint (`str`, *optional*):
+            Endpoint of the Hub. Defaults to `https://huggingface.co`.
+
+    Example:
+    ```py
+    >>> RepoUrl('https://huggingface.co/gpt2')
+    RepoUrl('https://huggingface.co/gpt2', endpoint='https://huggingface.co', repo_type='model', repo_id='gpt2')
+
+    >>> RepoUrl('https://hub-ci.huggingface.co/datasets/dummy_user/dummy_dataset', endpoint='https://hub-ci.huggingface.co')
+    RepoUrl('https://hub-ci.huggingface.co/datasets/dummy_user/dummy_dataset', endpoint='https://hub-ci.huggingface.co', repo_type='dataset', repo_id='dummy_user/dummy_dataset')
+
+    >>> RepoUrl('hf://datasets/my-user/my-dataset')
+    RepoUrl('hf://datasets/my-user/my-dataset', endpoint='https://huggingface.co', repo_type='dataset', repo_id='my-user/my-dataset')
+
+    >>> HfApi.create_repo("dummy_model")
+    RepoUrl('https://huggingface.co/Wauplin/dummy_model', endpoint='https://huggingface.co', repo_type='model', repo_id='Wauplin/dummy_model')
+    ```
+
+    Raises:
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If URL cannot be parsed.
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If `repo_type` is unknown.
+
+    """
+
+    def __new__(cls, url: Any, endpoint: Optional[str] = None):
+        url = fix_hf_endpoint_in_url(url, endpoint=endpoint)
+        return super(RepoUrl, cls).__new__(cls, url)
+
+    def __init__(self, url: Any, endpoint: Optional[str] = None) -> None:
+        super().__init__()
+        # Parse URL
+        self.endpoint = endpoint or constants.ENDPOINT
+        repo_type, namespace, repo_name = repo_type_and_id_from_hf_id(self, hub_url=self.endpoint)
+
+        # Populate fields
+        self.namespace = namespace
+        self.repo_name = repo_name
+        self.repo_id = repo_name if namespace is None else f"{namespace}/{repo_name}"
+        self.repo_type = repo_type or constants.REPO_TYPE_MODEL
+        self.url = str(self)  # just in case it's needed
+
+    def __repr__(self) -> str:
+        return f"RepoUrl('{self}', endpoint='{self.endpoint}', repo_type='{self.repo_type}', repo_id='{self.repo_id}')"
+
+
+@dataclass
+class RepoSibling:
+    """
+    Contains basic information about a repo file inside a repo on the Hub.
+
+    > [!TIP]
+    > All attributes of this class are optional except `rfilename`. This is because only the file names are returned when
+    > listing repositories on the Hub (with [`list_models`], [`list_datasets`] or [`list_spaces`]). If you need more
+    > information like file size, blob id or lfs details, you must request them specifically from one repo at a time
+    > (using [`model_info`], [`dataset_info`] or [`space_info`]) as it adds more constraints on the backend server to
+    > retrieve these.
+
+    Attributes:
+        rfilename (str):
+            file name, relative to the repo root.
+        size (`int`, *optional*):
+            The file's size, in bytes. This attribute is defined when `files_metadata` argument of [`repo_info`] is set
+            to `True`. It's `None` otherwise.
+        blob_id (`str`, *optional*):
+            The file's git OID. This attribute is defined when `files_metadata` argument of [`repo_info`] is set to
+            `True`. It's `None` otherwise.
+        lfs (`BlobLfsInfo`, *optional*):
+            The file's LFS metadata. This attribute is defined when `files_metadata` argument of [`repo_info`] is set
+            to `True` and the file is stored with Git LFS. It's `None` otherwise.
+    """
+
+    rfilename: str
+    size: Optional[int] = None
+    blob_id: Optional[str] = None
+    lfs: Optional[BlobLfsInfo] = None
+
+
+@dataclass
+class RepoFile:
+    """
+    Contains information about a file on the Hub.
+
+    Attributes:
+        path (str):
+            file path relative to the repo root.
+        size (`int`):
+            The file's size, in bytes.
+        blob_id (`str`):
+            The file's git OID.
+        lfs (`BlobLfsInfo`):
+            The file's LFS metadata.
+        last_commit (`LastCommitInfo`, *optional*):
+            The file's last commit metadata. Only defined if [`list_repo_tree`] and [`get_paths_info`]
+            are called with `expand=True`.
+        security (`BlobSecurityInfo`, *optional*):
+            The file's security scan metadata. Only defined if [`list_repo_tree`] and [`get_paths_info`]
+            are called with `expand=True`.
+ """ + + path: str + size: int + blob_id: str + lfs: Optional[BlobLfsInfo] = None + last_commit: Optional[LastCommitInfo] = None + security: Optional[BlobSecurityInfo] = None + + def __init__(self, **kwargs): + self.path = kwargs.pop("path") + self.size = kwargs.pop("size") + self.blob_id = kwargs.pop("oid") + lfs = kwargs.pop("lfs", None) + if lfs is not None: + lfs = BlobLfsInfo(size=lfs["size"], sha256=lfs["oid"], pointer_size=lfs["pointerSize"]) + self.lfs = lfs + last_commit = kwargs.pop("lastCommit", None) or kwargs.pop("last_commit", None) + if last_commit is not None: + last_commit = LastCommitInfo( + oid=last_commit["id"], title=last_commit["title"], date=parse_datetime(last_commit["date"]) + ) + self.last_commit = last_commit + security = kwargs.pop("securityFileStatus", None) + if security is not None: + safe = security["status"] == "safe" + security = BlobSecurityInfo( + safe=safe, + status=security["status"], + av_scan=security["avScan"], + pickle_import_scan=security["pickleImportScan"], + ) + self.security = security + + # backwards compatibility + self.rfilename = self.path + self.lastCommit = self.last_commit + + +@dataclass +class RepoFolder: + """ + Contains information about a folder on the Hub. + + Attributes: + path (str): + folder path relative to the repo root. + tree_id (`str`): + The folder's git OID. + last_commit (`LastCommitInfo`, *optional*): + The folder's last commit metadata. Only defined if [`list_repo_tree`] and [`get_paths_info`] + are called with `expand=True`. + """ + + path: str + tree_id: str + last_commit: Optional[LastCommitInfo] = None + + def __init__(self, **kwargs): + self.path = kwargs.pop("path") + self.tree_id = kwargs.pop("oid") + last_commit = kwargs.pop("lastCommit", None) or kwargs.pop("last_commit", None) + if last_commit is not None: + last_commit = LastCommitInfo( + oid=last_commit["id"], title=last_commit["title"], date=parse_datetime(last_commit["date"]) + ) + self.last_commit = last_commit + + +@dataclass +class InferenceProviderMapping: + provider: "PROVIDER_T" # Provider name + hf_model_id: str # ID of the model on the Hugging Face Hub + provider_id: str # ID of the model on the provider's side + status: Literal["error", "live", "staging"] + task: str + + adapter: Optional[str] = None + adapter_weights_path: Optional[str] = None + type: Optional[Literal["single-model", "tag-filter"]] = None + + def __init__(self, **kwargs): + self.provider = kwargs.pop("provider") + self.hf_model_id = kwargs.pop("hf_model_id") + self.provider_id = kwargs.pop("providerId") + self.status = kwargs.pop("status") + self.task = kwargs.pop("task") + + self.adapter = kwargs.pop("adapter", None) + self.adapter_weights_path = kwargs.pop("adapterWeightsPath", None) + self.type = kwargs.pop("type", None) + self.__dict__.update(**kwargs) + + +@dataclass +class ModelInfo: + """ + Contains information about a model on the Hub. This object is returned by [`model_info`] and [`list_models`]. + + > [!TIP] + > Most attributes of this class are optional. This is because the data returned by the Hub depends on the query made. + > In general, the more specific the query, the more information is returned. On the contrary, when listing models + > using [`list_models`] only a subset of the attributes are returned. + + Attributes: + id (`str`): + ID of model. + author (`str`, *optional*): + Author of the model. + sha (`str`, *optional*): + Repo SHA at this particular revision. + created_at (`datetime`, *optional*): + Date of creation of the repo on the Hub. 
Note that the lowest value is `2022-03-02T23:29:04.000Z`,
+            corresponding to the date when we began to store creation dates.
+        last_modified (`datetime`, *optional*):
+            Date of last commit to the repo.
+        private (`bool`):
+            Is the repo private.
+        disabled (`bool`, *optional*):
+            Is the repo disabled.
+        downloads (`int`):
+            Number of downloads of the model over the last 30 days.
+        downloads_all_time (`int`):
+            Cumulative number of downloads of the model since its creation.
+        gated (`Literal["auto", "manual", False]`, *optional*):
+            Is the repo gated.
+            If so, whether there is manual or automatic approval.
+        gguf (`Dict`, *optional*):
+            GGUF information of the model.
+        inference (`Literal["warm"]`, *optional*):
+            Status of the model on Inference Providers. Warm if the model is served by at least one provider.
+        inference_provider_mapping (`List[InferenceProviderMapping]`, *optional*):
+            A list of [`InferenceProviderMapping`] ordered according to the user's provider preference order.
+        likes (`int`):
+            Number of likes of the model.
+        library_name (`str`, *optional*):
+            Library associated with the model.
+        tags (`List[str]`):
+            List of tags of the model. Compared to `card_data.tags`, contains extra tags computed by the Hub
+            (e.g. supported libraries, model's arXiv).
+        pipeline_tag (`str`, *optional*):
+            Pipeline tag associated with the model.
+        mask_token (`str`, *optional*):
+            Mask token used by the model.
+        widget_data (`Any`, *optional*):
+            Widget data associated with the model.
+        model_index (`Dict`, *optional*):
+            Model index for evaluation.
+        config (`Dict`, *optional*):
+            Model configuration.
+        transformers_info (`TransformersInfo`, *optional*):
+            Transformers-specific info (auto class, processor, etc.) associated with the model.
+        trending_score (`int`, *optional*):
+            Trending score of the model.
+        card_data (`ModelCardData`, *optional*):
+            Model Card Metadata as a [`huggingface_hub.repocard_data.ModelCardData`] object.
+        siblings (`List[RepoSibling]`):
+            List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the model.
+        spaces (`List[str]`, *optional*):
+            List of spaces using the model.
+        safetensors (`SafeTensorsInfo`, *optional*):
+            Model's safetensors information.
+        security_repo_status (`Dict`, *optional*):
+            Model's security scan status.
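+        xet_enabled (`bool`, *optional*):
+            Whether the repo is stored with the Xet storage backend.
+
+    Example (an editor's sketch; the exact fields returned depend on the query made):
+        ```py
+        >>> from huggingface_hub import HfApi
+        >>> api = HfApi()
+        >>> model = api.model_info("gpt2")  # full info for a single repo
+        >>> model.id, model.pipeline_tag  # doctest: +SKIP
+        ('gpt2', 'text-generation')
+        ```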
+ """ + + id: str + author: Optional[str] + sha: Optional[str] + created_at: Optional[datetime] + last_modified: Optional[datetime] + private: Optional[bool] + disabled: Optional[bool] + downloads: Optional[int] + downloads_all_time: Optional[int] + gated: Optional[Literal["auto", "manual", False]] + gguf: Optional[Dict] + inference: Optional[Literal["warm"]] + inference_provider_mapping: Optional[List[InferenceProviderMapping]] + likes: Optional[int] + library_name: Optional[str] + tags: Optional[List[str]] + pipeline_tag: Optional[str] + mask_token: Optional[str] + card_data: Optional[ModelCardData] + widget_data: Optional[Any] + model_index: Optional[Dict] + config: Optional[Dict] + transformers_info: Optional[TransformersInfo] + trending_score: Optional[int] + siblings: Optional[List[RepoSibling]] + spaces: Optional[List[str]] + safetensors: Optional[SafeTensorsInfo] + security_repo_status: Optional[Dict] + xet_enabled: Optional[bool] + + def __init__(self, **kwargs): + self.id = kwargs.pop("id") + self.author = kwargs.pop("author", None) + self.sha = kwargs.pop("sha", None) + last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None) + self.last_modified = parse_datetime(last_modified) if last_modified else None + created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None) + self.created_at = parse_datetime(created_at) if created_at else None + self.private = kwargs.pop("private", None) + self.gated = kwargs.pop("gated", None) + self.disabled = kwargs.pop("disabled", None) + self.downloads = kwargs.pop("downloads", None) + self.downloads_all_time = kwargs.pop("downloadsAllTime", None) + self.likes = kwargs.pop("likes", None) + self.library_name = kwargs.pop("library_name", None) + self.gguf = kwargs.pop("gguf", None) + + self.inference = kwargs.pop("inference", None) + + # little hack to simplify Inference Providers logic and make it backward and forward compatible + # right now, API returns a dict on model_info and a list on list_models. Let's harmonize to list. + mapping = kwargs.pop("inferenceProviderMapping", None) + if isinstance(mapping, list): + self.inference_provider_mapping = [ + InferenceProviderMapping(**{**value, "hf_model_id": self.id}) for value in mapping + ] + elif isinstance(mapping, dict): + self.inference_provider_mapping = [ + InferenceProviderMapping(**{**value, "hf_model_id": self.id, "provider": provider}) + for provider, value in mapping.items() + ] + elif mapping is None: + self.inference_provider_mapping = None + else: + raise ValueError( + f"Unexpected type for `inferenceProviderMapping`. Expecting `dict` or `list`. Got {mapping}." 
+            )
+
+        self.tags = kwargs.pop("tags", None)
+        self.pipeline_tag = kwargs.pop("pipeline_tag", None)
+        self.mask_token = kwargs.pop("mask_token", None)
+        self.trending_score = kwargs.pop("trendingScore", None)
+
+        card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None)
+        self.card_data = (
+            ModelCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data
+        )
+
+        self.widget_data = kwargs.pop("widgetData", None)
+        self.model_index = kwargs.pop("model-index", None) or kwargs.pop("model_index", None)
+        self.config = kwargs.pop("config", None)
+        transformers_info = kwargs.pop("transformersInfo", None) or kwargs.pop("transformers_info", None)
+        self.transformers_info = TransformersInfo(**transformers_info) if transformers_info else None
+        siblings = kwargs.pop("siblings", None)
+        self.siblings = (
+            [
+                RepoSibling(
+                    rfilename=sibling["rfilename"],
+                    size=sibling.get("size"),
+                    blob_id=sibling.get("blobId"),
+                    lfs=(
+                        BlobLfsInfo(
+                            size=sibling["lfs"]["size"],
+                            sha256=sibling["lfs"]["sha256"],
+                            pointer_size=sibling["lfs"]["pointerSize"],
+                        )
+                        if sibling.get("lfs")
+                        else None
+                    ),
+                )
+                for sibling in siblings
+            ]
+            if siblings is not None
+            else None
+        )
+        self.spaces = kwargs.pop("spaces", None)
+        safetensors = kwargs.pop("safetensors", None)
+        self.safetensors = (
+            SafeTensorsInfo(
+                parameters=safetensors["parameters"],
+                total=safetensors["total"],
+            )
+            if safetensors
+            else None
+        )
+        self.security_repo_status = kwargs.pop("securityRepoStatus", None)
+        self.xet_enabled = kwargs.pop("xetEnabled", None)
+        # backwards compatibility
+        self.lastModified = self.last_modified
+        self.cardData = self.card_data
+        self.transformersInfo = self.transformers_info
+        self.__dict__.update(**kwargs)
+
+
+@dataclass
+class DatasetInfo:
+    """
+    Contains information about a dataset on the Hub. This object is returned by [`dataset_info`] and [`list_datasets`].
+
+    > [!TIP]
+    > Most attributes of this class are optional. This is because the data returned by the Hub depends on the query made.
+    > In general, the more specific the query, the more information is returned. On the contrary, when listing datasets
+    > using [`list_datasets`] only a subset of the attributes are returned.
+
+    Attributes:
+        id (`str`):
+            ID of dataset.
+        author (`str`):
+            Author of the dataset.
+        sha (`str`):
+            Repo SHA at this particular revision.
+        created_at (`datetime`, *optional*):
+            Date of creation of the repo on the Hub. Note that the lowest value is `2022-03-02T23:29:04.000Z`,
+            corresponding to the date when we began to store creation dates.
+        last_modified (`datetime`, *optional*):
+            Date of last commit to the repo.
+        private (`bool`):
+            Is the repo private.
+        disabled (`bool`, *optional*):
+            Is the repo disabled.
+        gated (`Literal["auto", "manual", False]`, *optional*):
+            Is the repo gated.
+            If so, whether there is manual or automatic approval.
+        downloads (`int`):
+            Number of downloads of the dataset over the last 30 days.
+        downloads_all_time (`int`):
+            Cumulative number of downloads of the dataset since its creation.
+        likes (`int`):
+            Number of likes of the dataset.
+        tags (`List[str]`):
+            List of tags of the dataset.
+        card_data (`DatasetCardData`, *optional*):
+            Dataset Card Metadata as a [`huggingface_hub.repocard_data.DatasetCardData`] object.
+        siblings (`List[RepoSibling]`):
+            List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the dataset.
+        paperswithcode_id (`str`, *optional*):
+            Papers with code ID of the dataset.
+ trending_score (`int`, *optional*): + Trending score of the dataset. + """ + + id: str + author: Optional[str] + sha: Optional[str] + created_at: Optional[datetime] + last_modified: Optional[datetime] + private: Optional[bool] + gated: Optional[Literal["auto", "manual", False]] + disabled: Optional[bool] + downloads: Optional[int] + downloads_all_time: Optional[int] + likes: Optional[int] + paperswithcode_id: Optional[str] + tags: Optional[List[str]] + trending_score: Optional[int] + card_data: Optional[DatasetCardData] + siblings: Optional[List[RepoSibling]] + xet_enabled: Optional[bool] + + def __init__(self, **kwargs): + self.id = kwargs.pop("id") + self.author = kwargs.pop("author", None) + self.sha = kwargs.pop("sha", None) + created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None) + self.created_at = parse_datetime(created_at) if created_at else None + last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None) + self.last_modified = parse_datetime(last_modified) if last_modified else None + self.private = kwargs.pop("private", None) + self.gated = kwargs.pop("gated", None) + self.disabled = kwargs.pop("disabled", None) + self.downloads = kwargs.pop("downloads", None) + self.downloads_all_time = kwargs.pop("downloadsAllTime", None) + self.likes = kwargs.pop("likes", None) + self.paperswithcode_id = kwargs.pop("paperswithcode_id", None) + self.tags = kwargs.pop("tags", None) + self.trending_score = kwargs.pop("trendingScore", None) + + card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None) + self.card_data = ( + DatasetCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data + ) + siblings = kwargs.pop("siblings", None) + self.siblings = ( + [ + RepoSibling( + rfilename=sibling["rfilename"], + size=sibling.get("size"), + blob_id=sibling.get("blobId"), + lfs=( + BlobLfsInfo( + size=sibling["lfs"]["size"], + sha256=sibling["lfs"]["sha256"], + pointer_size=sibling["lfs"]["pointerSize"], + ) + if sibling.get("lfs") + else None + ), + ) + for sibling in siblings + ] + if siblings is not None + else None + ) + self.xet_enabled = kwargs.pop("xetEnabled", None) + # backwards compatibility + self.lastModified = self.last_modified + self.cardData = self.card_data + self.__dict__.update(**kwargs) + + +@dataclass +class SpaceInfo: + """ + Contains information about a Space on the Hub. This object is returned by [`space_info`] and [`list_spaces`]. + + > [!TIP] + > Most attributes of this class are optional. This is because the data returned by the Hub depends on the query made. + > In general, the more specific the query, the more information is returned. On the contrary, when listing spaces + > using [`list_spaces`] only a subset of the attributes are returned. + + Attributes: + id (`str`): + ID of the Space. + author (`str`, *optional*): + Author of the Space. + sha (`str`, *optional*): + Repo SHA at this particular revision. + created_at (`datetime`, *optional*): + Date of creation of the repo on the Hub. Note that the lowest value is `2022-03-02T23:29:04.000Z`, + corresponding to the date when we began to store creation dates. + last_modified (`datetime`, *optional*): + Date of last commit to the repo. + private (`bool`): + Is the repo private. + gated (`Literal["auto", "manual", False]`, *optional*): + Is the repo gated. + If so, whether there is manual or automatic approval. + disabled (`bool`, *optional*): + Is the Space disabled. + host (`str`, *optional*): + Host URL of the Space. 
+ subdomain (`str`, *optional*): + Subdomain of the Space. + likes (`int`): + Number of likes of the Space. + tags (`List[str]`): + List of tags of the Space. + siblings (`List[RepoSibling]`): + List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the Space. + card_data (`SpaceCardData`, *optional*): + Space Card Metadata as a [`huggingface_hub.repocard_data.SpaceCardData`] object. + runtime (`SpaceRuntime`, *optional*): + Space runtime information as a [`huggingface_hub.hf_api.SpaceRuntime`] object. + sdk (`str`, *optional*): + SDK used by the Space. + models (`List[str]`, *optional*): + List of models used by the Space. + datasets (`List[str]`, *optional*): + List of datasets used by the Space. + trending_score (`int`, *optional*): + Trending score of the Space. + """ + + id: str + author: Optional[str] + sha: Optional[str] + created_at: Optional[datetime] + last_modified: Optional[datetime] + private: Optional[bool] + gated: Optional[Literal["auto", "manual", False]] + disabled: Optional[bool] + host: Optional[str] + subdomain: Optional[str] + likes: Optional[int] + sdk: Optional[str] + tags: Optional[List[str]] + siblings: Optional[List[RepoSibling]] + trending_score: Optional[int] + card_data: Optional[SpaceCardData] + runtime: Optional[SpaceRuntime] + models: Optional[List[str]] + datasets: Optional[List[str]] + xet_enabled: Optional[bool] + + def __init__(self, **kwargs): + self.id = kwargs.pop("id") + self.author = kwargs.pop("author", None) + self.sha = kwargs.pop("sha", None) + created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None) + self.created_at = parse_datetime(created_at) if created_at else None + last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None) + self.last_modified = parse_datetime(last_modified) if last_modified else None + self.private = kwargs.pop("private", None) + self.gated = kwargs.pop("gated", None) + self.disabled = kwargs.pop("disabled", None) + self.host = kwargs.pop("host", None) + self.subdomain = kwargs.pop("subdomain", None) + self.likes = kwargs.pop("likes", None) + self.sdk = kwargs.pop("sdk", None) + self.tags = kwargs.pop("tags", None) + self.trending_score = kwargs.pop("trendingScore", None) + card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None) + self.card_data = ( + SpaceCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data + ) + siblings = kwargs.pop("siblings", None) + self.siblings = ( + [ + RepoSibling( + rfilename=sibling["rfilename"], + size=sibling.get("size"), + blob_id=sibling.get("blobId"), + lfs=( + BlobLfsInfo( + size=sibling["lfs"]["size"], + sha256=sibling["lfs"]["sha256"], + pointer_size=sibling["lfs"]["pointerSize"], + ) + if sibling.get("lfs") + else None + ), + ) + for sibling in siblings + ] + if siblings is not None + else None + ) + runtime = kwargs.pop("runtime", None) + self.runtime = SpaceRuntime(runtime) if runtime else None + self.models = kwargs.pop("models", None) + self.datasets = kwargs.pop("datasets", None) + self.xet_enabled = kwargs.pop("xetEnabled", None) + # backwards compatibility + self.lastModified = self.last_modified + self.cardData = self.card_data + self.__dict__.update(**kwargs) + + +@dataclass +class CollectionItem: + """ + Contains information about an item of a Collection (model, dataset, Space, paper or collection). + + Attributes: + item_object_id (`str`): + Unique ID of the item in the collection. + item_id (`str`): + ID of the underlying object on the Hub. 
Can be either a repo_id, a paper id or a collection slug. + e.g. `"jbilcke-hf/ai-comic-factory"`, `"2307.09288"`, `"celinah/cerebras-function-calling-682607169c35fbfa98b30b9a"`. + item_type (`str`): + Type of the underlying object. Can be one of `"model"`, `"dataset"`, `"space"`, `"paper"` or `"collection"`. + position (`int`): + Position of the item in the collection. + note (`str`, *optional*): + Note associated with the item, as plain text. + """ + + item_object_id: str # id in database + item_id: str # repo_id or paper id + item_type: str + position: int + note: Optional[str] = None + + def __init__( + self, + _id: str, + id: str, + type: CollectionItemType_T, + position: int, + note: Optional[Dict] = None, + **kwargs, + ) -> None: + self.item_object_id: str = _id # id in database + self.item_id: str = id # repo_id or paper id + # if the item is a collection, override item_id with the slug + slug = kwargs.get("slug") + if slug is not None: + self.item_id = slug # collection slug + self.item_type: CollectionItemType_T = type + self.position: int = position + self.note: str = note["text"] if note is not None else None + + +@dataclass +class Collection: + """ + Contains information about a Collection on the Hub. + + Attributes: + slug (`str`): + Slug of the collection. E.g. `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. + title (`str`): + Title of the collection. E.g. `"Recent models"`. + owner (`str`): + Owner of the collection. E.g. `"TheBloke"`. + items (`List[CollectionItem]`): + List of items in the collection. + last_updated (`datetime`): + Date of the last update of the collection. + position (`int`): + Position of the collection in the list of collections of the owner. + private (`bool`): + Whether the collection is private or not. + theme (`str`): + Theme of the collection. E.g. `"green"`. + upvotes (`int`): + Number of upvotes of the collection. + description (`str`, *optional*): + Description of the collection, as plain text. + url (`str`): + (property) URL of the collection on the Hub. + """ + + slug: str + title: str + owner: str + items: List[CollectionItem] + last_updated: datetime + position: int + private: bool + theme: str + upvotes: int + description: Optional[str] = None + + def __init__(self, **kwargs) -> None: + self.slug = kwargs.pop("slug") + self.title = kwargs.pop("title") + self.owner = kwargs.pop("owner") + self.items = [CollectionItem(**item) for item in kwargs.pop("items")] + self.last_updated = parse_datetime(kwargs.pop("lastUpdated")) + self.position = kwargs.pop("position") + self.private = kwargs.pop("private") + self.theme = kwargs.pop("theme") + self.upvotes = kwargs.pop("upvotes") + self.description = kwargs.pop("description", None) + endpoint = kwargs.pop("endpoint", None) + if endpoint is None: + endpoint = constants.ENDPOINT + self._url = f"{endpoint}/collections/{self.slug}" + + @property + def url(self) -> str: + """Returns the URL of the collection on the Hub.""" + return self._url + + +@dataclass +class GitRefInfo: + """ + Contains information about a git reference for a repo on the Hub. + + Attributes: + name (`str`): + Name of the reference (e.g. tag name or branch name). + ref (`str`): + Full git ref on the Hub (e.g. `"refs/heads/main"` or `"refs/tags/v1.0"`). + target_commit (`str`): + OID of the target commit for the ref (e.g. `"e7da7f221d5bf496a48136c0cd264e630fe9fcc8"`) + """ + + name: str + ref: str + target_commit: str + + +@dataclass +class GitRefs: + """ + Contains information about all git references for a repo on the Hub. 
+
+    Object is returned by [`list_repo_refs`].
+
+    Attributes:
+        branches (`List[GitRefInfo]`):
+            A list of [`GitRefInfo`] containing information about branches on the repo.
+        converts (`List[GitRefInfo]`):
+            A list of [`GitRefInfo`] containing information about "convert" refs on the repo.
+            Converts are refs used (internally) to push preprocessed data in Dataset repos.
+        tags (`List[GitRefInfo]`):
+            A list of [`GitRefInfo`] containing information about tags on the repo.
+        pull_requests (`List[GitRefInfo]`, *optional*):
+            A list of [`GitRefInfo`] containing information about pull requests on the repo.
+            Only returned if `include_prs=True` is set.
+    """
+
+    branches: List[GitRefInfo]
+    converts: List[GitRefInfo]
+    tags: List[GitRefInfo]
+    pull_requests: Optional[List[GitRefInfo]] = None
+
+
+@dataclass
+class GitCommitInfo:
+    """
+    Contains information about a git commit for a repo on the Hub. Check out [`list_repo_commits`] for more details.
+
+    Attributes:
+        commit_id (`str`):
+            OID of the commit (e.g. `"e7da7f221d5bf496a48136c0cd264e630fe9fcc8"`)
+        authors (`List[str]`):
+            List of authors of the commit.
+        created_at (`datetime`):
+            Datetime when the commit was created.
+        title (`str`):
+            Title of the commit. This is a free-text value entered by the authors.
+        message (`str`):
+            Description of the commit. This is a free-text value entered by the authors.
+        formatted_title (`str`, *optional*):
+            Title of the commit formatted as HTML. Only returned if `formatted=True` is set.
+        formatted_message (`str`, *optional*):
+            Description of the commit formatted as HTML. Only returned if `formatted=True` is set.
+    """
+
+    commit_id: str
+
+    authors: List[str]
+    created_at: datetime
+    title: str
+    message: str
+
+    formatted_title: Optional[str]
+    formatted_message: Optional[str]
+
+
+@dataclass
+class UserLikes:
+    """
+    Contains information about a user's likes on the Hub.
+
+    Attributes:
+        user (`str`):
+            Name of the user for which we fetched the likes.
+        total (`int`):
+            Total number of likes.
+        datasets (`List[str]`):
+            List of datasets liked by the user (as repo_ids).
+        models (`List[str]`):
+            List of models liked by the user (as repo_ids).
+        spaces (`List[str]`):
+            List of spaces liked by the user (as repo_ids).
+    """
+
+    # Metadata
+    user: str
+    total: int
+
+    # User likes
+    datasets: List[str]
+    models: List[str]
+    spaces: List[str]
+
+
+@dataclass
+class Organization:
+    """
+    Contains information about an organization on the Hub.
+
+    Attributes:
+        avatar_url (`str`):
+            URL of the organization's avatar.
+        name (`str`):
+            Name of the organization on the Hub (unique).
+        fullname (`str`):
+            Organization's full name.
+        details (`str`, *optional*):
+            Organization's description.
+        is_verified (`bool`, *optional*):
+            Whether the organization is verified.
+        is_following (`bool`, *optional*):
+            Whether the authenticated user follows this organization.
+        num_users (`int`, *optional*):
+            Number of members in the organization.
+        num_models (`int`, *optional*):
+            Number of models owned by the organization.
+        num_spaces (`int`, *optional*):
+            Number of Spaces owned by the organization.
+        num_datasets (`int`, *optional*):
+            Number of datasets owned by the organization.
+        num_followers (`int`, *optional*):
+            Number of followers of the organization.
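+
+    Example (a minimal sketch; assumes [`get_user_overview`] is available on `HfApi`, and the
+    organization names shown are illustrative):
+        ```py
+        >>> from huggingface_hub import HfApi
+        >>> user = HfApi().get_user_overview("julien-c")  # doctest: +SKIP
+        >>> [org.name for org in user.orgs]  # doctest: +SKIP
+        ['huggingface', ...]
+        ```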
+ """ + + avatar_url: str + name: str + fullname: str + details: Optional[str] = None + is_verified: Optional[bool] = None + is_following: Optional[bool] = None + num_users: Optional[int] = None + num_models: Optional[int] = None + num_spaces: Optional[int] = None + num_datasets: Optional[int] = None + num_followers: Optional[int] = None + + def __init__(self, **kwargs) -> None: + self.avatar_url = kwargs.pop("avatarUrl", "") + self.name = kwargs.pop("name", "") + self.fullname = kwargs.pop("fullname", "") + self.details = kwargs.pop("details", None) + self.is_verified = kwargs.pop("isVerified", None) + self.is_following = kwargs.pop("isFollowing", None) + self.num_users = kwargs.pop("numUsers", None) + self.num_models = kwargs.pop("numModels", None) + self.num_spaces = kwargs.pop("numSpaces", None) + self.num_datasets = kwargs.pop("numDatasets", None) + self.num_followers = kwargs.pop("numFollowers", None) + + # forward compatibility + self.__dict__.update(**kwargs) + + +@dataclass +class User: + """ + Contains information about a user on the Hub. + + Attributes: + username (`str`): + Name of the user on the Hub (unique). + fullname (`str`): + User's full name. + avatar_url (`str`): + URL of the user's avatar. + details (`str`, *optional*): + User's details. + is_following (`bool`, *optional*): + Whether the authenticated user is following this user. + is_pro (`bool`, *optional*): + Whether the user is a pro user. + num_models (`int`, *optional*): + Number of models created by the user. + num_datasets (`int`, *optional*): + Number of datasets created by the user. + num_spaces (`int`, *optional*): + Number of spaces created by the user. + num_discussions (`int`, *optional*): + Number of discussions initiated by the user. + num_papers (`int`, *optional*): + Number of papers authored by the user. + num_upvotes (`int`, *optional*): + Number of upvotes received by the user. + num_likes (`int`, *optional*): + Number of likes given by the user. + num_following (`int`, *optional*): + Number of users this user is following. + num_followers (`int`, *optional*): + Number of users following this user. + orgs (list of [`Organization`]): + List of organizations the user is part of. 
+ """ + + # Metadata + username: str + fullname: str + avatar_url: str + details: Optional[str] = None + is_following: Optional[bool] = None + is_pro: Optional[bool] = None + num_models: Optional[int] = None + num_datasets: Optional[int] = None + num_spaces: Optional[int] = None + num_discussions: Optional[int] = None + num_papers: Optional[int] = None + num_upvotes: Optional[int] = None + num_likes: Optional[int] = None + num_following: Optional[int] = None + num_followers: Optional[int] = None + orgs: List[Organization] = field(default_factory=list) + + def __init__(self, **kwargs) -> None: + self.username = kwargs.pop("user", "") + self.fullname = kwargs.pop("fullname", "") + self.avatar_url = kwargs.pop("avatarUrl", "") + self.is_following = kwargs.pop("isFollowing", None) + self.is_pro = kwargs.pop("isPro", None) + self.details = kwargs.pop("details", None) + self.num_models = kwargs.pop("numModels", None) + self.num_datasets = kwargs.pop("numDatasets", None) + self.num_spaces = kwargs.pop("numSpaces", None) + self.num_discussions = kwargs.pop("numDiscussions", None) + self.num_papers = kwargs.pop("numPapers", None) + self.num_upvotes = kwargs.pop("numUpvotes", None) + self.num_likes = kwargs.pop("numLikes", None) + self.num_following = kwargs.pop("numFollowing", None) + self.num_followers = kwargs.pop("numFollowers", None) + self.user_type = kwargs.pop("type", None) + self.orgs = [Organization(**org) for org in kwargs.pop("orgs", [])] + + # forward compatibility + self.__dict__.update(**kwargs) + + +@dataclass +class PaperInfo: + """ + Contains information about a paper on the Hub. + + Attributes: + id (`str`): + arXiv paper ID. + authors (`List[str]`, **optional**): + Names of paper authors + published_at (`datetime`, **optional**): + Date paper published. + title (`str`, **optional**): + Title of the paper. + summary (`str`, **optional**): + Summary of the paper. + upvotes (`int`, **optional**): + Number of upvotes for the paper on the Hub. + discussion_id (`str`, **optional**): + Discussion ID for the paper on the Hub. + source (`str`, **optional**): + Source of the paper. + comments (`int`, **optional**): + Number of comments for the paper on the Hub. + submitted_at (`datetime`, **optional**): + Date paper appeared in daily papers on the Hub. + submitted_by (`User`, **optional**): + Information about who submitted the daily paper. 
+ """ + + id: str + authors: Optional[List[str]] + published_at: Optional[datetime] + title: Optional[str] + summary: Optional[str] + upvotes: Optional[int] + discussion_id: Optional[str] + source: Optional[str] + comments: Optional[int] + submitted_at: Optional[datetime] + submitted_by: Optional[User] + + def __init__(self, **kwargs) -> None: + paper = kwargs.pop("paper", {}) + self.id = kwargs.pop("id", None) or paper.pop("id", None) + authors = paper.pop("authors", None) or kwargs.pop("authors", None) + self.authors = [author.pop("name", None) for author in authors] if authors else None + published_at = paper.pop("publishedAt", None) or kwargs.pop("publishedAt", None) + self.published_at = parse_datetime(published_at) if published_at else None + self.title = kwargs.pop("title", None) + self.source = kwargs.pop("source", None) + self.summary = paper.pop("summary", None) or kwargs.pop("summary", None) + self.upvotes = paper.pop("upvotes", None) or kwargs.pop("upvotes", None) + self.discussion_id = paper.pop("discussionId", None) or kwargs.pop("discussionId", None) + self.comments = kwargs.pop("numComments", 0) + submitted_at = kwargs.pop("publishedAt", None) or kwargs.pop("submittedOnDailyAt", None) + self.submitted_at = parse_datetime(submitted_at) if submitted_at else None + submitted_by = kwargs.pop("submittedBy", None) or kwargs.pop("submittedOnDailyBy", None) + self.submitted_by = User(**submitted_by) if submitted_by else None + + # forward compatibility + self.__dict__.update(**kwargs) + + +@dataclass +class LFSFileInfo: + """ + Contains information about a file stored as LFS on a repo on the Hub. + + Used in the context of listing and permanently deleting LFS files from a repo to free-up space. + See [`list_lfs_files`] and [`permanently_delete_lfs_files`] for more details. + + Git LFS files are tracked using SHA-256 object IDs, rather than file paths, to optimize performance + This approach is necessary because a single object can be referenced by multiple paths across different commits, + making it impractical to search and resolve these connections. Check out [our documentation](https://huggingface.co/docs/hub/storage-limits#advanced-track-lfs-file-references) + to learn how to know which filename(s) is(are) associated with each SHA. + + Attributes: + file_oid (`str`): + SHA-256 object ID of the file. This is the identifier to pass when permanently deleting the file. + filename (`str`): + Possible filename for the LFS object. See the note above for more information. + oid (`str`): + OID of the LFS object. + pushed_at (`datetime`): + Date the LFS object was pushed to the repo. + ref (`str`, *optional*): + Ref where the LFS object has been pushed (if any). + size (`int`): + Size of the LFS object. + + Example: + ```py + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> lfs_files = api.list_lfs_files("username/my-cool-repo") + + # Filter files files to delete based on a combination of `filename`, `pushed_at`, `ref` or `size`. + # e.g. 
+        >>> lfs_files_to_delete = (lfs_file for lfs_file in lfs_files if lfs_file.filename.startswith("checkpoints/"))
+
+        # Permanently delete LFS files
+        >>> api.permanently_delete_lfs_files("username/my-cool-repo", lfs_files_to_delete)
+        ```
+    """
+
+    file_oid: str
+    filename: str
+    oid: str
+    pushed_at: datetime
+    ref: Optional[str]
+    size: int
+
+    def __init__(self, **kwargs) -> None:
+        self.file_oid = kwargs.pop("fileOid")
+        self.filename = kwargs.pop("filename")
+        self.oid = kwargs.pop("oid")
+        self.pushed_at = parse_datetime(kwargs.pop("pushedAt"))
+        self.ref = kwargs.pop("ref", None)
+        self.size = kwargs.pop("size")
+
+        # forward compatibility
+        self.__dict__.update(**kwargs)
+
+
+def future_compatible(fn: CallableT) -> CallableT:
+    """Wrap a method of `HfApi` to handle `run_as_future=True`.
+
+    A method flagged as "future_compatible" will be called in a thread if `run_as_future=True` and return a
+    `concurrent.futures.Future` instance. Otherwise, it will be called normally and return the result.
+    """
+    sig = inspect.signature(fn)
+    args_params = list(sig.parameters)[1:]  # remove "self" from list
+
+    @wraps(fn)
+    def _inner(self, *args, **kwargs):
+        # Get `run_as_future` value if provided (default to False)
+        if "run_as_future" in kwargs:
+            run_as_future = kwargs["run_as_future"]
+            kwargs["run_as_future"] = False  # avoid recursion error
+        else:
+            run_as_future = False
+            for param, value in zip(args_params, args):
+                if param == "run_as_future":
+                    run_as_future = value
+                    break
+
+        # Call the function in a thread if `run_as_future=True`
+        if run_as_future:
+            return self.run_as_future(fn, self, *args, **kwargs)
+
+        # Otherwise, call the function normally
+        return fn(self, *args, **kwargs)
+
+    _inner.is_future_compatible = True  # type: ignore
+    return _inner  # type: ignore
+
+
+class HfApi:
+    """
+    Client to interact with the Hugging Face Hub via HTTP.
+
+    The client is initialized with some high-level settings used in all requests
+    made to the Hub (HF endpoint, authentication, user agents...). Using the `HfApi`
+    client is preferred but not mandatory as all of its public methods are exposed
+    directly at the root of `huggingface_hub`.
+
+    Args:
+        endpoint (`str`, *optional*):
+            Endpoint of the Hub. Defaults to `https://huggingface.co`.
+        token (`bool` or `str`, *optional*):
+            A valid user access token (string). Defaults to the locally saved
+            token, which is the recommended method for authentication (see
+            https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+            To disable authentication, pass `False`.
+        library_name (`str`, *optional*):
+            The name of the library that is making the HTTP request. Will be added to
+            the user-agent header. Example: `"transformers"`.
+        library_version (`str`, *optional*):
+            The version of the library that is making the HTTP request. Will be added
+            to the user-agent header. Example: `"4.24.0"`.
+        user_agent (`str`, `dict`, *optional*):
+            The user agent info in the form of a dictionary or a single string. It will
+            be completed with information about the installed packages.
+        headers (`dict`, *optional*):
+            Additional headers to be sent with each request. Example: `{"X-My-Header": "value"}`.
+            Headers passed here take precedence over the default headers.
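+
+    Example (a minimal sketch; the `library_name`/`library_version` values are placeholders):
+        ```py
+        >>> from huggingface_hub import HfApi
+
+        >>> # All settings are optional; defaults come from the environment and the saved token.
+        >>> api = HfApi(
+        ...     endpoint="https://huggingface.co",
+        ...     library_name="my-library",
+        ...     library_version="0.0.1",
+        ... )
+        >>> api.whoami()  # doctest: +SKIP
+        ```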
+ """ + + def __init__( + self, + endpoint: Optional[str] = None, + token: Union[str, bool, None] = None, + library_name: Optional[str] = None, + library_version: Optional[str] = None, + user_agent: Union[Dict, str, None] = None, + headers: Optional[Dict[str, str]] = None, + ) -> None: + self.endpoint = endpoint if endpoint is not None else constants.ENDPOINT + self.token = token + self.library_name = library_name + self.library_version = library_version + self.user_agent = user_agent + self.headers = headers + self._thread_pool: Optional[ThreadPoolExecutor] = None + + def run_as_future(self, fn: Callable[..., R], *args, **kwargs) -> Future[R]: + """ + Run a method in the background and return a Future instance. + + The main goal is to run methods without blocking the main thread (e.g. to push data during a training). + Background jobs are queued to preserve order but are not ran in parallel. If you need to speed-up your scripts + by parallelizing lots of call to the API, you must setup and use your own [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor). + + Note: Most-used methods like [`upload_file`], [`upload_folder`] and [`create_commit`] have a `run_as_future: bool` + argument to directly call them in the background. This is equivalent to calling `api.run_as_future(...)` on them + but less verbose. + + Args: + fn (`Callable`): + The method to run in the background. + *args, **kwargs: + Arguments with which the method will be called. + + Return: + `Future`: a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) instance to + get the result of the task. + + Example: + ```py + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> future = api.run_as_future(api.whoami) # instant + >>> future.done() + False + >>> future.result() # wait until complete and return result + (...) + >>> future.done() + True + ``` + """ + if self._thread_pool is None: + self._thread_pool = ThreadPoolExecutor(max_workers=1) + self._thread_pool + return self._thread_pool.submit(fn, *args, **kwargs) + + @validate_hf_hub_args + def whoami(self, token: Union[bool, str, None] = None) -> Dict: + """ + Call HF API to know "whoami". + + Args: + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + """ + # Get the effective token using the helper function get_token + effective_token = token or self.token or get_token() or True + r = get_session().get( + f"{self.endpoint}/api/whoami-v2", + headers=self._build_hf_headers(token=effective_token), + ) + try: + hf_raise_for_status(r) + except HTTPError as e: + if e.response.status_code == 401: + error_message = "Invalid user token." + # Check which token is the effective one and generate the error message accordingly + if effective_token == _get_token_from_google_colab(): + error_message += " The token from Google Colab vault is invalid. Please update it from the UI." + elif effective_token == _get_token_from_environment(): + error_message += ( + " The token from HF_TOKEN environment variable is invalid. " + "Note that HF_TOKEN takes precedence over `hf auth login`." + ) + elif effective_token == _get_token_from_file(): + error_message += " The token stored is invalid. Please run `hf auth login` to update it." 
+                raise HTTPError(error_message, request=e.request, response=e.response) from e
+            raise
+        return r.json()
+
+    @_deprecate_method(
+        version="1.0",
+        message=(
+            "Permissions are more complex than when `get_token_permission` was first introduced. "
+            "OAuth and fine-grained tokens allow for more detailed permissions. "
+            "If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key."
+        ),
+    )
+    def get_token_permission(
+        self, token: Union[bool, str, None] = None
+    ) -> Literal["read", "write", "fineGrained", None]:
+        """
+        Check if a given `token` is valid and return its permissions.
+
+        > [!WARNING]
+        > This method is deprecated and will be removed in version 1.0. Permissions are more complex than when
+        > `get_token_permission` was first introduced. OAuth and fine-grained tokens allow for more detailed permissions.
+        > If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key.
+
+        For more details about tokens, please refer to https://huggingface.co/docs/hub/security-tokens#what-are-user-access-tokens.
+
+        Args:
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Literal["read", "write", "fineGrained", None]`: Permission granted by the token ("read" or "write"). Returns `None` if no
+            token is passed, if the token is invalid or if the role is not returned by the server. This typically happens when the token is an OAuth token.
+        """
+        try:
+            return self.whoami(token=token)["auth"]["accessToken"]["role"]
+        except (LocalTokenNotFoundError, HTTPError, KeyError):
+            return None
+
+    def get_model_tags(self) -> Dict:
+        """
+        List all valid model tags as a nested namespace object.
+        """
+        path = f"{self.endpoint}/api/models-tags-by-type"
+        r = get_session().get(path)
+        hf_raise_for_status(r)
+        return r.json()
+
+    def get_dataset_tags(self) -> Dict:
+        """
+        List all valid dataset tags as a nested namespace object.
+        """
+        path = f"{self.endpoint}/api/datasets-tags-by-type"
+        r = get_session().get(path)
+        hf_raise_for_status(r)
+        return r.json()
+
+    @_deprecate_arguments(
+        version="1.0", deprecated_args=["language", "library", "task", "tags"], custom_message="Use `filter` instead."
+    )
+    @validate_hf_hub_args
+    def list_models(
+        self,
+        *,
+        # Search-query parameter
+        filter: Union[str, Iterable[str], None] = None,
+        author: Optional[str] = None,
+        apps: Optional[Union[str, List[str]]] = None,
+        gated: Optional[bool] = None,
+        inference: Optional[Literal["warm"]] = None,
+        inference_provider: Optional[Union[Literal["all"], "PROVIDER_T", List["PROVIDER_T"]]] = None,
+        model_name: Optional[str] = None,
+        trained_dataset: Optional[Union[str, List[str]]] = None,
+        search: Optional[str] = None,
+        pipeline_tag: Optional[str] = None,
+        emissions_thresholds: Optional[Tuple[float, float]] = None,
+        # Sorting and pagination parameters
+        sort: Union[Literal["last_modified"], str, None] = None,
+        direction: Optional[Literal[-1]] = None,
+        limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandModelProperty_T]] = None,
+        full: Optional[bool] = None,
+        cardData: bool = False,
+        fetch_config: bool = False,
+        token: Union[bool, str, None] = None,
+        # Deprecated arguments - use `filter` instead
+        language: Optional[Union[str, List[str]]] = None,
+        library: Optional[Union[str, List[str]]] = None,
+        tags: Optional[Union[str, List[str]]] = None,
+        task: Optional[Union[str, List[str]]] = None,
+    ) -> Iterable[ModelInfo]:
+        """
+        List models hosted on the Hugging Face Hub, given some filters.
+
+        Args:
+            filter (`str` or `Iterable[str]`, *optional*):
+                A string or list of strings to filter models on the Hub.
+                Models can be filtered by library, language, task, tags, and more.
+            author (`str`, *optional*):
+                A string which identifies the author (user or organization) of the
+                returned models.
+            apps (`str` or `List`, *optional*):
+                A string or list of strings to filter models on the Hub that
+                support the specified apps. Example values include `"ollama"` or `["ollama", "vllm"]`.
+            gated (`bool`, *optional*):
+                A boolean to filter models on the Hub that are gated or not. By default, all models are returned.
+                If `gated=True` is passed, only gated models are returned.
+                If `gated=False` is passed, only non-gated models are returned.
+            inference (`Literal["warm"]`, *optional*):
+                If "warm", filter models on the Hub currently served by at least one provider.
+            inference_provider (`Literal["all"]` or `str`, *optional*):
+                A string to filter models on the Hub that are served by a specific provider.
+                Pass `"all"` to get all models served by at least one provider.
+            library (`str` or `List`, *optional*):
+                Deprecated. Pass a library name in `filter` to filter models by library.
+            language (`str` or `List`, *optional*):
+                Deprecated. Pass a language in `filter` to filter models by language.
+            model_name (`str`, *optional*):
+                A string that contains complete or partial names for models on the
+                Hub, such as "bert" or "bert-base-cased".
+            task (`str` or `List`, *optional*):
+                Deprecated. Pass a task in `filter` to filter models by task.
+            trained_dataset (`str` or `List`, *optional*):
+                A string tag or a list of string tags of the trained dataset for a
+                model on the Hub.
+            tags (`str` or `List`, *optional*):
+                Deprecated. Pass tags in `filter` to filter models by tags.
+            search (`str`, *optional*):
+                A string that will be contained in the returned model ids.
+            pipeline_tag (`str`, *optional*):
+                A string pipeline tag to filter models on the Hub by, such as `summarization`.
+            emissions_thresholds (`Tuple`, *optional*):
+                A tuple of two ints or floats representing a minimum and maximum
+                carbon footprint to filter the resulting models with, in grams.
+ sort (`Literal["last_modified"]` or `str`, *optional*): + The key with which to sort the resulting models. Possible values are "last_modified", "trending_score", + "created_at", "downloads" and "likes". + direction (`Literal[-1]` or `int`, *optional*): + Direction in which to sort. The value `-1` sorts by descending + order while all other values sort by ascending order. + limit (`int`, *optional*): + The limit on the number of models fetched. Leaving this option + to `None` fetches all models. + expand (`List[ExpandModelProperty_T]`, *optional*): + List properties to return in the response. When used, only the properties in the list will be returned. + This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed. + Possible values are `"author"`, `"cardData"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"resourceGroup"` and `"xetEnabled"`. + full (`bool`, *optional*): + Whether to fetch all model data, including the `last_modified`, + the `sha`, the files and the `tags`. This is set to `True` by + default when using a filter. + cardData (`bool`, *optional*): + Whether to grab the metadata for the model as well. Can contain + useful information such as carbon emissions, metrics, and + datasets trained on. + fetch_config (`bool`, *optional*): + Whether to fetch the model configs as well. This is not included + in `full` due to its size. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + + Returns: + `Iterable[ModelInfo]`: an iterable of [`huggingface_hub.hf_api.ModelInfo`] objects. 
+ + Example: + + ```python + >>> from huggingface_hub import HfApi + + >>> api = HfApi() + + # List all models + >>> api.list_models() + + # List text classification models + >>> api.list_models(filter="text-classification") + + # List models from the KerasHub library + >>> api.list_models(filter="keras-hub") + + # List models served by Cohere + >>> api.list_models(inference_provider="cohere") + + # List models with "bert" in their name + >>> api.list_models(search="bert") + + # List models with "bert" in their name and pushed by google + >>> api.list_models(search="bert", author="google") + ``` + """ + if expand and (full or cardData or fetch_config): + raise ValueError("`expand` cannot be used if `full`, `cardData` or `fetch_config` are passed.") + + if emissions_thresholds is not None and not cardData: + raise ValueError("`emissions_thresholds` were passed without setting `cardData=True`.") + + path = f"{self.endpoint}/api/models" + headers = self._build_hf_headers(token=token) + params: Dict[str, Any] = {} + + # Build the filter list + filter_list: List[str] = [] + if filter: + filter_list.extend([filter] if isinstance(filter, str) else filter) + if library: + filter_list.extend([library] if isinstance(library, str) else library) + if task: + filter_list.extend([task] if isinstance(task, str) else task) + if trained_dataset: + if isinstance(trained_dataset, str): + trained_dataset = [trained_dataset] + for dataset in trained_dataset: + if not dataset.startswith("dataset:"): + dataset = f"dataset:{dataset}" + filter_list.append(dataset) + if language: + filter_list.extend([language] if isinstance(language, str) else language) + if tags: + filter_list.extend([tags] if isinstance(tags, str) else tags) + if len(filter_list) > 0: + params["filter"] = filter_list + + # Handle other query params + if author: + params["author"] = author + if apps: + if isinstance(apps, str): + apps = [apps] + params["apps"] = apps + if gated is not None: + params["gated"] = gated + if inference is not None: + params["inference"] = inference + if inference_provider is not None: + params["inference_provider"] = inference_provider + if pipeline_tag: + params["pipeline_tag"] = pipeline_tag + search_list = [] + if model_name: + search_list.append(model_name) + if search: + search_list.append(search) + if len(search_list) > 0: + params["search"] = search_list + if sort is not None: + params["sort"] = ( + "lastModified" + if sort == "last_modified" + else "trendingScore" + if sort == "trending_score" + else "createdAt" + if sort == "created_at" + else sort + ) + if direction is not None: + params["direction"] = direction + if limit is not None: + params["limit"] = limit + + # Request additional data + if full: + params["full"] = True + if fetch_config: + params["config"] = True + if cardData: + params["cardData"] = True + if expand: + params["expand"] = expand + + # `items` is a generator + items = paginate(path, params=params, headers=headers) + if limit is not None: + items = islice(items, limit) # Do not iterate over all pages + for item in items: + if "siblings" not in item: + item["siblings"] = None + model_info = ModelInfo(**item) + if emissions_thresholds is None or _is_emission_within_threshold(model_info, *emissions_thresholds): + yield model_info + + @_deprecate_arguments(version="1.0", deprecated_args=["tags"], custom_message="Use `filter` instead.") + @validate_hf_hub_args + def list_datasets( + self, + *, + # Search-query parameter + filter: Union[str, Iterable[str], None] = None, + author: Optional[str] 
= None,
+        benchmark: Optional[Union[str, List[str]]] = None,
+        dataset_name: Optional[str] = None,
+        gated: Optional[bool] = None,
+        language_creators: Optional[Union[str, List[str]]] = None,
+        language: Optional[Union[str, List[str]]] = None,
+        multilinguality: Optional[Union[str, List[str]]] = None,
+        size_categories: Optional[Union[str, List[str]]] = None,
+        task_categories: Optional[Union[str, List[str]]] = None,
+        task_ids: Optional[Union[str, List[str]]] = None,
+        search: Optional[str] = None,
+        # Sorting and pagination parameters
+        sort: Optional[Union[Literal["last_modified"], str]] = None,
+        direction: Optional[Literal[-1]] = None,
+        limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandDatasetProperty_T]] = None,
+        full: Optional[bool] = None,
+        token: Union[bool, str, None] = None,
+        # Deprecated arguments - use `filter` instead
+        tags: Optional[Union[str, List[str]]] = None,
+    ) -> Iterable[DatasetInfo]:
+        """
+        List datasets hosted on the Hugging Face Hub, given some filters.
+
+        Args:
+            filter (`str` or `Iterable[str]`, *optional*):
+                A string or list of strings to filter datasets on the Hub.
+            author (`str`, *optional*):
+                A string which identifies the author of the returned datasets.
+            benchmark (`str` or `List`, *optional*):
+                A string or list of strings that can be used to identify datasets on
+                the Hub by their official benchmark.
+            dataset_name (`str`, *optional*):
+                A string that can be used to identify datasets on
+                the Hub by their name, such as `SQAC` or `wikineural`.
+            gated (`bool`, *optional*):
+                A boolean to filter datasets on the Hub that are gated or not. By default, all datasets are returned.
+                If `gated=True` is passed, only gated datasets are returned.
+                If `gated=False` is passed, only non-gated datasets are returned.
+            language_creators (`str` or `List`, *optional*):
+                A string or list of strings that can be used to identify datasets on
+                the Hub with how the data was curated, such as `crowdsourced` or
+                `machine_generated`.
+            language (`str` or `List`, *optional*):
+                A string or list of strings representing a two-character language to
+                filter datasets by on the Hub.
+            multilinguality (`str` or `List`, *optional*):
+                A string or list of strings representing a filter for datasets that
+                contain multiple languages.
+            size_categories (`str` or `List`, *optional*):
+                A string or list of strings that can be used to identify datasets on
+                the Hub by the size of the dataset such as `100K<n<1M` or
+                `1M<n<10M`.
+            task_categories (`str` or `List`, *optional*):
+                A string or list of strings that can be used to identify datasets on
+                the Hub by the designed task, such as `audio_classification` or
+                `named_entity_recognition`.
+            task_ids (`str` or `List`, *optional*):
+                A string or list of strings that can be used to identify datasets on
+                the Hub by the specific task such as `speech_emotion_recognition` or
+                `paraphrase`.
+            search (`str`, *optional*):
+                A string that will be contained in the returned datasets.
+            sort (`Literal["last_modified"]` or `str`, *optional*):
+                The key with which to sort the resulting datasets. Possible
+                values are "last_modified", "trending_score", "created_at" and "likes".
+            direction (`Literal[-1]` or `int`, *optional*):
+                Direction in which to sort. The value `-1` sorts by descending
+                order while all other values sort by ascending order.
+            limit (`int`, *optional*):
+                The limit on the number of datasets fetched. Leaving this option
+                to `None` fetches all datasets.
+            expand (`List[ExpandDatasetProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+            full (`bool`, *optional*):
+                Whether to fetch all dataset data, including the `last_modified`,
+                the `card_data` and the files.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+            tags (`str` or `List`, *optional*):
+                Deprecated. Pass tags in `filter` to filter datasets by tags.
+
+        Returns:
+            `Iterable[DatasetInfo]`: an iterable of [`huggingface_hub.hf_api.DatasetInfo`] objects.
+
+        Example usage with the `filter` argument:
+
+        ```python
+        >>> from huggingface_hub import HfApi
+
+        >>> api = HfApi()
+
+        # List all datasets
+        >>> api.list_datasets()
+
+
+        # List only the text classification datasets
+        >>> api.list_datasets(filter="task_categories:text-classification")
+
+
+        # List only the datasets in Russian for language modeling
+        >>> api.list_datasets(
+        ...     filter=("language:ru", "task_ids:language-modeling")
+        ... )
+
+        # List FiftyOne datasets (identified by the tag "fiftyone" in dataset card)
+        >>> api.list_datasets(tags="fiftyone")
+        ```
+
+        Example usage with the `search` argument:
+
+        ```python
+        >>> from huggingface_hub import HfApi
+
+        >>> api = HfApi()
+
+        # List all datasets with "text" in their name
+        >>> api.list_datasets(search="text")
+
+        # List all datasets with "text" in their name made by google
+        >>> api.list_datasets(search="text", author="google")
+        ```
+        """
+        if expand and full:
+            raise ValueError("`expand` cannot be used if `full` is passed.")
+
+        path = f"{self.endpoint}/api/datasets"
+        headers = self._build_hf_headers(token=token)
+        params: Dict[str, Any] = {}
+
+        # Build `filter` list
+        filter_list = []
+        if filter is not None:
+            if isinstance(filter, str):
+                filter_list.append(filter)
+            else:
+                filter_list.extend(filter)
+        for key, value in (
+            ("benchmark", benchmark),
+            ("language_creators", language_creators),
+            ("language", language),
+            ("multilinguality", multilinguality),
+            ("size_categories", size_categories),
+            ("task_categories", task_categories),
+            ("task_ids", task_ids),
+        ):
+            if value:
+                if isinstance(value, str):
+                    value = [value]
+                for value_item in value:
+                    if not value_item.startswith(f"{key}:"):
+                        data = f"{key}:{value_item}"
+                        filter_list.append(data)
+        if tags is not None:
+            filter_list.extend([tags] if isinstance(tags, str) else tags)
+        if len(filter_list) > 0:
+            params["filter"] = filter_list
+
+        # Handle other query params
+        if author:
+            params["author"] = author
+        if gated is not None:
+            params["gated"] = gated
+        search_list = []
+        if dataset_name:
+            search_list.append(dataset_name)
+        if search:
+            search_list.append(search)
+        if len(search_list) > 0:
+            params["search"] = search_list
+        if sort is not None:
+            params["sort"] = (
+                "lastModified"
+                if sort == "last_modified"
+                else "trendingScore"
+                if sort == "trending_score"
+                else "createdAt"
+                if sort == "created_at"
+                else sort
+            )
+        if direction is not None:
+            params["direction"] = direction
+        if limit is not None:
+            params["limit"] = limit
+
+        # Request additional data
+        if expand:
+            params["expand"] = expand
+        if full:
+            params["full"] = True
+
+        items = paginate(path, params=params, headers=headers)
+        if limit is not None:
+            items = islice(items, limit)  # Do not iterate over all pages
+        for item in items:
+            if "siblings" not in item:
+                item["siblings"] = None
+            yield DatasetInfo(**item)
+
+    @validate_hf_hub_args
+    def list_spaces(
+        self,
+        *,
+        # Search-query parameter
+        filter: Union[str, Iterable[str], None] = None,
+        author: Optional[str] = None,
+        search: Optional[str] = None,
+        datasets: Union[str, Iterable[str], None] = None,
+        models: Union[str, Iterable[str], None] = None,
+        linked: bool = False,
+        # Sorting and pagination parameters
+        sort: Union[Literal["last_modified"], str, None] = None,
+        direction: Optional[Literal[-1]] = None,
+        limit: Optional[int] = None,
+        # Additional data to fetch
+        expand: Optional[List[ExpandSpaceProperty_T]] = None,
+        full: Optional[bool] = None,
+        token: Union[bool, str, None] = None,
+    ) -> Iterable[SpaceInfo]:
+        """
+        List spaces hosted on the Hugging Face Hub, given some filters.
+
+        Args:
+            filter (`str` or `Iterable`, *optional*):
+                A string tag or list of tags that can be used to identify Spaces on the Hub.
+            author (`str`, *optional*):
+                A string which identifies the author of the returned Spaces.
+            search (`str`, *optional*):
+                A string that will be contained in the returned Spaces.
+            datasets (`str` or `Iterable`, *optional*):
+                Whether to return Spaces that make use of a dataset.
+                The name of a specific dataset can be passed as a string.
+            models (`str` or `Iterable`, *optional*):
+                Whether to return Spaces that make use of a model.
+                The name of a specific model can be passed as a string.
+            linked (`bool`, *optional*):
+                Whether to return Spaces that make use of either a model or a dataset.
+            sort (`Literal["last_modified"]` or `str`, *optional*):
+                The key with which to sort the resulting Spaces. Possible values are "last_modified", "trending_score",
+                "created_at" and "likes".
+            direction (`Literal[-1]` or `int`, *optional*):
+                Direction in which to sort. The value `-1` sorts by descending
+                order while all other values sort by ascending order.
+            limit (`int`, *optional*):
+                The limit on the number of Spaces fetched. Leaving this option
+                to `None` fetches all Spaces.
+            expand (`List[ExpandSpaceProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `full` is passed.
+                Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
+            full (`bool`, *optional*):
+                Whether to fetch all Spaces data, including the `last_modified`, `siblings`
+                and `card_data` fields.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[SpaceInfo]`: an iterable of [`huggingface_hub.hf_api.SpaceInfo`] objects.
+        """
+        if expand and full:
+            raise ValueError("`expand` cannot be used if `full` is passed.")
+
+        path = f"{self.endpoint}/api/spaces"
+        headers = self._build_hf_headers(token=token)
+        params: Dict[str, Any] = {}
+        if filter is not None:
+            params["filter"] = filter
+        if author is not None:
+            params["author"] = author
+        if search is not None:
+            params["search"] = search
+        if sort is not None:
+            params["sort"] = (
+                "lastModified"
+                if sort == "last_modified"
+                else "trendingScore"
+                if sort == "trending_score"
+                else "createdAt"
+                if sort == "created_at"
+                else sort
+            )
+        if direction is not None:
+            params["direction"] = direction
+        if limit is not None:
+            params["limit"] = limit
+        if linked:
+            params["linked"] = True
+        if datasets is not None:
+            params["datasets"] = datasets
+        if models is not None:
+            params["models"] = models
+
+        # Request additional data
+        if expand:
+            params["expand"] = expand
+        if full:
+            params["full"] = True
+
+        items = paginate(path, params=params, headers=headers)
+        if limit is not None:
+            items = islice(items, limit)  # Do not iterate over all pages
+        for item in items:
+            if "siblings" not in item:
+                item["siblings"] = None
+            yield SpaceInfo(**item)
+
+    @validate_hf_hub_args
+    def unlike(
+        self,
+        repo_id: str,
+        *,
+        token: Union[bool, str, None] = None,
+        repo_type: Optional[str] = None,
+    ) -> None:
+        """
+        Unlike a given repo on the Hub (e.g. remove from favorite list).
+
+        To prevent spam usage, it is not possible to `like` a repository from a script.
+
+        See also [`list_liked_repos`].
+
+        Args:
+            repo_id (`str`):
+                The repository to unlike.
Example: `"user/my-cool-model"`. + + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if unliking a dataset or space, `None` or + `"model"` if unliking a model. Default is `None`. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If repository is not found (error 404): wrong repo_id/repo_type, private + but not authenticated or repo does not exist. + + Example: + ```python + >>> from huggingface_hub import list_liked_repos, unlike + >>> "gpt2" in list_liked_repos().models # we assume you have already liked gpt2 + True + >>> unlike("gpt2") + >>> "gpt2" in list_liked_repos().models + False + ``` + """ + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + response = get_session().delete( + url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/like", headers=self._build_hf_headers(token=token) + ) + hf_raise_for_status(response) + + @validate_hf_hub_args + def list_liked_repos( + self, + user: Optional[str] = None, + *, + token: Union[bool, str, None] = None, + ) -> UserLikes: + """ + List all public repos liked by a user on huggingface.co. + + This list is public so token is optional. If `user` is not passed, it defaults to + the logged in user. + + See also [`unlike`]. + + Args: + user (`str`, *optional*): + Name of the user for which you want to fetch the likes. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`UserLikes`]: object containing the user name and 3 lists of repo ids (1 for + models, 1 for datasets and 1 for Spaces). + + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If `user` is not passed and no token found (either from argument or from machine). + + Example: + ```python + >>> from huggingface_hub import list_liked_repos + + >>> likes = list_liked_repos("julien-c") + + >>> likes.user + "julien-c" + + >>> likes.models + ["osanseviero/streamlit_1.15", "Xhaheen/ChatGPT_HF", ...] + ``` + """ + # User is either provided explicitly or retrieved from current token. + if user is None: + me = self.whoami(token=token) + if me["type"] == "user": + user = me["name"] + else: + raise ValueError( + "Cannot list liked repos. You must provide a 'user' as input or be logged in as a user." + ) + + path = f"{self.endpoint}/api/users/{user}/likes" + headers = self._build_hf_headers(token=token) + + likes = list(paginate(path, params={}, headers=headers)) + # Looping over a list of items similar to: + # { + # 'createdAt': '2021-09-09T21:53:27.000Z', + # 'repo': { + # 'name': 'PaddlePaddle/PaddleOCR', + # 'type': 'space' + # } + # } + # Let's loop 3 times over the received list. Less efficient but more straightforward to read. 
+        return UserLikes(
+            user=user,
+            total=len(likes),
+            models=[like["repo"]["name"] for like in likes if like["repo"]["type"] == "model"],
+            datasets=[like["repo"]["name"] for like in likes if like["repo"]["type"] == "dataset"],
+            spaces=[like["repo"]["name"] for like in likes if like["repo"]["type"] == "space"],
+        )
+
+    @validate_hf_hub_args
+    def list_repo_likers(
+        self,
+        repo_id: str,
+        *,
+        repo_type: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> Iterable[User]:
+        """
+        List all users who liked a given repo on the Hugging Face Hub.
+
+        See also [`list_liked_repos`].
+
+        Args:
+            repo_id (`str`):
+                The repository to retrieve likers from. Example: `"user/my-cool-model"`.
+
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if listing likers of a dataset or
+                space, `None` or `"model"` if listing likers of a model. Default is
+                `None`.
+
+        Returns:
+            `Iterable[User]`: an iterable of [`huggingface_hub.hf_api.User`] objects.
+        """
+
+        # Construct the API endpoint
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+        path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/likers"
+        for liker in paginate(path, params={}, headers=self._build_hf_headers(token=token)):
+            yield User(username=liker["user"], fullname=liker["fullname"], avatar_url=liker["avatarUrl"])
+
+    @validate_hf_hub_args
+    def model_info(
+        self,
+        repo_id: str,
+        *,
+        revision: Optional[str] = None,
+        timeout: Optional[float] = None,
+        securityStatus: Optional[bool] = None,
+        files_metadata: bool = False,
+        expand: Optional[List[ExpandModelProperty_T]] = None,
+        token: Union[bool, str, None] = None,
+    ) -> ModelInfo:
+        """
+        Get info on one specific model on huggingface.co.
+
+        Model can be private if you pass an acceptable token or are logged in.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated
+                by a `/`.
+            revision (`str`, *optional*):
+                The revision of the model repository from which to get the
+                information.
+            timeout (`float`, *optional*):
+                Timeout in seconds for the request to the Hub.
+            securityStatus (`bool`, *optional*):
+                Whether to retrieve the security status from the model
+                repository as well. The security status will be returned in the `security_repo_status` field.
+            files_metadata (`bool`, *optional*):
+                Whether or not to retrieve metadata for files in the repository
+                (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandModelProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
+                Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"inferenceProviderMapping"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"`, `"widgetData"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`huggingface_hub.hf_api.ModelInfo`]: The model repository information.
+
+        > [!TIP]
+        > Raises the following errors:
+        >
+        > - [`~utils.RepositoryNotFoundError`]
+        > If the repository to download from cannot be found. This may be because it doesn't exist,
+        > or because it is set to `private` and you do not have access.
+        > - [`~utils.RevisionNotFoundError`]
+        > If the revision to download from cannot be found.
+        """
+        if expand and (securityStatus or files_metadata):
+            raise ValueError("`expand` cannot be used if `securityStatus` or `files_metadata` are set.")
+
+        headers = self._build_hf_headers(token=token)
+        path = (
+            f"{self.endpoint}/api/models/{repo_id}"
+            if revision is None
+            else (f"{self.endpoint}/api/models/{repo_id}/revision/{quote(revision, safe='')}")
+        )
+        params: Dict = {}
+        if securityStatus:
+            params["securityStatus"] = True
+        if files_metadata:
+            params["blobs"] = True
+        if expand:
+            params["expand"] = expand
+        r = get_session().get(path, headers=headers, timeout=timeout, params=params)
+        hf_raise_for_status(r)
+        data = r.json()
+        return ModelInfo(**data)
+
+    @validate_hf_hub_args
+    def dataset_info(
+        self,
+        repo_id: str,
+        *,
+        revision: Optional[str] = None,
+        timeout: Optional[float] = None,
+        files_metadata: bool = False,
+        expand: Optional[List[ExpandDatasetProperty_T]] = None,
+        token: Union[bool, str, None] = None,
+    ) -> DatasetInfo:
+        """
+        Get info on one specific dataset on huggingface.co.
+
+        Dataset can be private if you pass an acceptable token.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated
+                by a `/`.
+            revision (`str`, *optional*):
+                The revision of the dataset repository from which to get the
+                information.
+            timeout (`float`, *optional*):
+                Timeout in seconds for the request to the Hub.
+            files_metadata (`bool`, *optional*):
+                Whether or not to retrieve metadata for files in the repository
+                (size, LFS metadata, etc). Defaults to `False`.
+            expand (`List[ExpandDatasetProperty_T]`, *optional*):
+                List properties to return in the response. When used, only the properties in the list will be returned.
+                This parameter cannot be used if `files_metadata` is passed.
+                Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`hf_api.DatasetInfo`]: The dataset repository information.
+
+        > [!TIP]
+        > Raises the following errors:
+        >
+        > - [`~utils.RepositoryNotFoundError`]
+        > If the repository to download from cannot be found. This may be because it doesn't exist,
+        > or because it is set to `private` and you do not have access.
+        > - [`~utils.RevisionNotFoundError`]
+        > If the revision to download from cannot be found.
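+
+        Example (an illustrative sketch; `"user/my-dataset"` is a placeholder repo id):
+        ```python
+        >>> from huggingface_hub import HfApi
+        >>> api = HfApi()
+        >>> info = api.dataset_info("user/my-dataset")
+        >>> info.sha  # commit hash of the latest revision
+        '...'
+        ```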
+ """ + if expand and files_metadata: + raise ValueError("`expand` cannot be used if `files_metadata` is set.") + + headers = self._build_hf_headers(token=token) + path = ( + f"{self.endpoint}/api/datasets/{repo_id}" + if revision is None + else (f"{self.endpoint}/api/datasets/{repo_id}/revision/{quote(revision, safe='')}") + ) + params: Dict = {} + if files_metadata: + params["blobs"] = True + if expand: + params["expand"] = expand + + r = get_session().get(path, headers=headers, timeout=timeout, params=params) + hf_raise_for_status(r) + data = r.json() + return DatasetInfo(**data) + + @validate_hf_hub_args + def space_info( + self, + repo_id: str, + *, + revision: Optional[str] = None, + timeout: Optional[float] = None, + files_metadata: bool = False, + expand: Optional[List[ExpandSpaceProperty_T]] = None, + token: Union[bool, str, None] = None, + ) -> SpaceInfo: + """ + Get info on one specific Space on huggingface.co. + + Space can be private if you pass an acceptable token. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + revision (`str`, *optional*): + The revision of the space repository from which to get the + information. + timeout (`float`, *optional*): + Whether to set a timeout for the request to the Hub. + files_metadata (`bool`, *optional*): + Whether or not to retrieve metadata for files in the repository + (size, LFS metadata, etc). Defaults to `False`. + expand (`List[ExpandSpaceProperty_T]`, *optional*): + List properties to return in the response. When used, only the properties in the list will be returned. + This parameter cannot be used if `full` is passed. + Possible values are `"author"`, `"cardData"`, `"createdAt"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"`, `"trendingScore"`, `"usedStorage"`, `"resourceGroup"` and `"xetEnabled"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`~hf_api.SpaceInfo`]: The space repository information. + + > [!TIP] + > Raises the following errors: + > + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + > - [`~utils.RevisionNotFoundError`] + > If the revision to download from cannot be found. 
+ """ + if expand and files_metadata: + raise ValueError("`expand` cannot be used if `files_metadata` is set.") + + headers = self._build_hf_headers(token=token) + path = ( + f"{self.endpoint}/api/spaces/{repo_id}" + if revision is None + else (f"{self.endpoint}/api/spaces/{repo_id}/revision/{quote(revision, safe='')}") + ) + params: Dict = {} + if files_metadata: + params["blobs"] = True + if expand: + params["expand"] = expand + + r = get_session().get(path, headers=headers, timeout=timeout, params=params) + hf_raise_for_status(r) + data = r.json() + return SpaceInfo(**data) + + @validate_hf_hub_args + def repo_info( + self, + repo_id: str, + *, + revision: Optional[str] = None, + repo_type: Optional[str] = None, + timeout: Optional[float] = None, + files_metadata: bool = False, + expand: Optional[Union[ExpandModelProperty_T, ExpandDatasetProperty_T, ExpandSpaceProperty_T]] = None, + token: Union[bool, str, None] = None, + ) -> Union[ModelInfo, DatasetInfo, SpaceInfo]: + """ + Get the info object for a given repo of a given type. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + revision (`str`, *optional*): + The revision of the repository from which to get the + information. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, + `None` or `"model"` if getting repository info from a model. Default is `None`. + timeout (`float`, *optional*): + Whether to set a timeout for the request to the Hub. + expand (`ExpandModelProperty_T` or `ExpandDatasetProperty_T` or `ExpandSpaceProperty_T`, *optional*): + List properties to return in the response. When used, only the properties in the list will be returned. + This parameter cannot be used if `files_metadata` is passed. + For an exhaustive list of available properties, check out [`model_info`], [`dataset_info`] or [`space_info`]. + files_metadata (`bool`, *optional*): + Whether or not to retrieve metadata for files in the repository + (size, LFS metadata, etc). Defaults to `False`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + `Union[SpaceInfo, DatasetInfo, ModelInfo]`: The repository information, as a + [`huggingface_hub.hf_api.DatasetInfo`], [`huggingface_hub.hf_api.ModelInfo`] + or [`huggingface_hub.hf_api.SpaceInfo`] object. + + > [!TIP] + > Raises the following errors: + > + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + > - [`~utils.RevisionNotFoundError`] + > If the revision to download from cannot be found. 
+ """ + if repo_type is None or repo_type == "model": + method = self.model_info + elif repo_type == "dataset": + method = self.dataset_info # type: ignore + elif repo_type == "space": + method = self.space_info # type: ignore + else: + raise ValueError("Unsupported repo type.") + return method( + repo_id, + revision=revision, + token=token, + timeout=timeout, + expand=expand, # type: ignore[arg-type] + files_metadata=files_metadata, + ) + + @validate_hf_hub_args + def repo_exists( + self, + repo_id: str, + *, + repo_type: Optional[str] = None, + token: Union[str, bool, None] = None, + ) -> bool: + """ + Checks if a repository exists on the Hugging Face Hub. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, + `None` or `"model"` if getting repository info from a model. Default is `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + True if the repository exists, False otherwise. + + Examples: + ```py + >>> from huggingface_hub import repo_exists + >>> repo_exists("google/gemma-7b") + True + >>> repo_exists("google/not-a-repo") + False + ``` + """ + try: + self.repo_info(repo_id=repo_id, repo_type=repo_type, token=token) + return True + except GatedRepoError: + return True # we don't have access but it exists + except RepositoryNotFoundError: + return False + + @validate_hf_hub_args + def revision_exists( + self, + repo_id: str, + revision: str, + *, + repo_type: Optional[str] = None, + token: Union[str, bool, None] = None, + ) -> bool: + """ + Checks if a specific revision exists on a repo on the Hugging Face Hub. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + revision (`str`): + The revision of the repository to check. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, + `None` or `"model"` if getting repository info from a model. Default is `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + True if the repository and the revision exists, False otherwise. + + Examples: + ```py + >>> from huggingface_hub import revision_exists + >>> revision_exists("google/gemma-7b", "float16") + True + >>> revision_exists("google/gemma-7b", "not-a-revision") + False + ``` + """ + try: + self.repo_info(repo_id=repo_id, revision=revision, repo_type=repo_type, token=token) + return True + except RevisionNotFoundError: + return False + except RepositoryNotFoundError: + return False + + @validate_hf_hub_args + def file_exists( + self, + repo_id: str, + filename: str, + *, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + token: Union[str, bool, None] = None, + ) -> bool: + """ + Checks if a file exists in a repository on the Hugging Face Hub. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. 
+            filename (`str`):
+                The name of the file to check, for example:
+                `"config.json"`.
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space,
+                `None` or `"model"` if getting repository info from a model. Default is `None`.
+            revision (`str`, *optional*):
+                The revision of the repository from which to get the information. Defaults to `"main"` branch.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            True if the file exists, False otherwise.
+
+        Examples:
+            ```py
+            >>> from huggingface_hub import file_exists
+            >>> file_exists("bigcode/starcoder", "config.json")
+            True
+            >>> file_exists("bigcode/starcoder", "not-a-file")
+            False
+            >>> file_exists("bigcode/not-a-repo", "config.json")
+            False
+            ```
+        """
+        url = hf_hub_url(
+            repo_id=repo_id, repo_type=repo_type, revision=revision, filename=filename, endpoint=self.endpoint
+        )
+        try:
+            if token is None:
+                token = self.token
+            get_hf_file_metadata(url, token=token)
+            return True
+        except GatedRepoError:  # raise specifically on gated repo
+            raise
+        except (RepositoryNotFoundError, EntryNotFoundError, RevisionNotFoundError):
+            return False
+
+    @validate_hf_hub_args
+    def list_repo_files(
+        self,
+        repo_id: str,
+        *,
+        revision: Optional[str] = None,
+        repo_type: Optional[str] = None,
+        token: Union[str, bool, None] = None,
+    ) -> List[str]:
+        """
+        Get the list of files in a given repo.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a `/`.
+            revision (`str`, *optional*):
+                The revision of the repository from which to get the information.
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if listing files from a dataset or space, `None` or `"model"` if
+                listing from a model. Default is `None`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `List[str]`: the list of files in a given repository.
+        """
+        return [
+            f.rfilename
+            for f in self.list_repo_tree(
+                repo_id=repo_id, recursive=True, revision=revision, repo_type=repo_type, token=token
+            )
+            if isinstance(f, RepoFile)
+        ]
+
+    @validate_hf_hub_args
+    def list_repo_tree(
+        self,
+        repo_id: str,
+        path_in_repo: Optional[str] = None,
+        *,
+        recursive: bool = False,
+        expand: bool = False,
+        revision: Optional[str] = None,
+        repo_type: Optional[str] = None,
+        token: Union[str, bool, None] = None,
+    ) -> Iterable[Union[RepoFile, RepoFolder]]:
+        """
+        List a repo tree's files and folders and get information about them.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a `/`.
+            path_in_repo (`str`, *optional*):
+                Relative path of the tree (folder) in the repo, for example:
+                `"checkpoints/1fec34a/results"`. Will default to the root tree (folder) of the repository.
+            recursive (`bool`, *optional*, defaults to `False`):
+                Whether to list the tree's files and folders recursively.
+            expand (`bool`, *optional*, defaults to `False`):
+                Whether to fetch more information about the tree's files and folders (e.g.
+                last commit and files' security scan results). This
+                operation is more expensive for the server so only 50 results are returned per page (instead of 1000).
+                As pagination is implemented in `huggingface_hub`, this is transparent for you except for the time it
+                takes to get the results.
+            revision (`str`, *optional*):
+                The revision of the repository from which to get the tree. Defaults to `"main"` branch.
+            repo_type (`str`, *optional*):
+                The type of the repository from which to get the tree (`"model"`, `"dataset"` or `"space"`).
+                Defaults to `"model"`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[Union[RepoFile, RepoFolder]]`:
+                The information about the tree's files and folders, as an iterable of [`RepoFile`] and [`RepoFolder`] objects. The order of the files and folders is
+                not guaranteed.
+
+        Raises:
+            [`~utils.RepositoryNotFoundError`]:
+                If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo
+                does not exist.
+            [`~utils.RevisionNotFoundError`]:
+                If revision is not found (error 404) on the repo.
+            [`~utils.EntryNotFoundError`]:
+                If the tree (folder) does not exist (error 404) on the repo.
+
+        Examples:
+
+        Get information about a repo's tree.
+        ```py
+        >>> from huggingface_hub import list_repo_tree
+        >>> repo_tree = list_repo_tree("lysandre/arxiv-nlp")
+        >>> repo_tree
+
+        >>> list(repo_tree)
+        [
+            RepoFile(path='.gitattributes', size=391, blob_id='ae8c63daedbd4206d7d40126955d4e6ab1c80f8f', lfs=None, last_commit=None, security=None),
+            RepoFile(path='README.md', size=391, blob_id='43bd404b159de6fba7c2f4d3264347668d43af25', lfs=None, last_commit=None, security=None),
+            RepoFile(path='config.json', size=554, blob_id='2f9618c3a19b9a61add74f70bfb121335aeef666', lfs=None, last_commit=None, security=None),
+            RepoFile(
+                path='flax_model.msgpack', size=497764107, blob_id='8095a62ccb4d806da7666fcda07467e2d150218e',
+                lfs={'size': 497764107, 'sha256': 'd88b0d6a6ff9c3f8151f9d3228f57092aaea997f09af009eefd7373a77b5abb9', 'pointer_size': 134}, last_commit=None, security=None
+            ),
+            RepoFile(path='merges.txt', size=456318, blob_id='226b0752cac7789c48f0cb3ec53eda48b7be36cc', lfs=None, last_commit=None, security=None),
+            RepoFile(
+                path='pytorch_model.bin', size=548123560, blob_id='64eaa9c526867e404b68f2c5d66fd78e27026523',
+                lfs={'size': 548123560, 'sha256': '9be78edb5b928eba33aa88f431551348f7466ba9f5ef3daf1d552398722a5436', 'pointer_size': 134}, last_commit=None, security=None
+            ),
+            RepoFile(path='vocab.json', size=898669, blob_id='b00361fece0387ca34b4b8b8539ed830d644dbeb', lfs=None, last_commit=None, security=None)
+        ]
+        ```
+
+        Get even more information about a repo's tree (last commit and files' security scan results)
+        ```py
+        >>> from huggingface_hub import list_repo_tree
+        >>> repo_tree = list_repo_tree("prompthero/openjourney-v4", expand=True)
+        >>> list(repo_tree)
+        [
+            RepoFolder(
+                path='feature_extractor',
+                tree_id='aa536c4ea18073388b5b0bc791057a7296a00398',
+                last_commit={
+                    'oid': '47b62b20b20e06b9de610e840282b7e6c3d51190',
+                    'title': 'Upload diffusers weights (#48)',
+                    'date': datetime.datetime(2023, 3, 21, 9, 5, 27, tzinfo=datetime.timezone.utc)
+                }
+            ),
+            RepoFolder(
+                path='safety_checker',
+                tree_id='65aef9d787e5557373fdf714d6c34d4fcdd70440',
+                last_commit={
'oid': '47b62b20b20e06b9de610e840282b7e6c3d51190', + 'title': 'Upload diffusers weights (#48)', + 'date': datetime.datetime(2023, 3, 21, 9, 5, 27, tzinfo=datetime.timezone.utc) + } + ), + RepoFile( + path='model_index.json', + size=582, + blob_id='d3d7c1e8c3e78eeb1640b8e2041ee256e24c9ee1', + lfs=None, + last_commit={ + 'oid': 'b195ed2d503f3eb29637050a886d77bd81d35f0e', + 'title': 'Fix deprecation warning by changing `CLIPFeatureExtractor` to `CLIPImageProcessor`. (#54)', + 'date': datetime.datetime(2023, 5, 15, 21, 41, 59, tzinfo=datetime.timezone.utc) + }, + security={ + 'safe': True, + 'av_scan': {'virusFound': False, 'virusNames': None}, + 'pickle_import_scan': None + } + ) + ... + ] + ``` + """ + repo_type = repo_type or constants.REPO_TYPE_MODEL + revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION + headers = self._build_hf_headers(token=token) + + encoded_path_in_repo = "/" + quote(path_in_repo, safe="") if path_in_repo else "" + tree_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tree/{revision}{encoded_path_in_repo}" + for path_info in paginate(path=tree_url, headers=headers, params={"recursive": recursive, "expand": expand}): + yield (RepoFile(**path_info) if path_info["type"] == "file" else RepoFolder(**path_info)) + + @validate_hf_hub_args + def list_repo_refs( + self, + repo_id: str, + *, + repo_type: Optional[str] = None, + include_pull_requests: bool = False, + token: Union[str, bool, None] = None, + ) -> GitRefs: + """ + Get the list of refs of a given repo (both tags and branches). + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if listing refs from a dataset or a Space, + `None` or `"model"` if listing from a model. Default is `None`. + include_pull_requests (`bool`, *optional*): + Whether to include refs from pull requests in the list. Defaults to `False`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Example: + ```py + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> api.list_repo_refs("gpt2") + GitRefs(branches=[GitRefInfo(name='main', ref='refs/heads/main', target_commit='e7da7f221d5bf496a48136c0cd264e630fe9fcc8')], converts=[], tags=[]) + + >>> api.list_repo_refs("bigcode/the-stack", repo_type='dataset') + GitRefs( + branches=[ + GitRefInfo(name='main', ref='refs/heads/main', target_commit='18edc1591d9ce72aa82f56c4431b3c969b210ae3'), + GitRefInfo(name='v1.1.a1', ref='refs/heads/v1.1.a1', target_commit='f9826b862d1567f3822d3d25649b0d6d22ace714') + ], + converts=[], + tags=[ + GitRefInfo(name='v1.0', ref='refs/tags/v1.0', target_commit='c37a8cd1e382064d8aced5e05543c5f7753834da') + ] + ) + ``` + + Returns: + [`GitRefs`]: object containing all information about branches and tags for a + repo on the Hub. 
+ """ + repo_type = repo_type or constants.REPO_TYPE_MODEL + response = get_session().get( + f"{self.endpoint}/api/{repo_type}s/{repo_id}/refs", + headers=self._build_hf_headers(token=token), + params={"include_prs": 1} if include_pull_requests else {}, + ) + hf_raise_for_status(response) + data = response.json() + + def _format_as_git_ref_info(item: Dict) -> GitRefInfo: + return GitRefInfo(name=item["name"], ref=item["ref"], target_commit=item["targetCommit"]) + + return GitRefs( + branches=[_format_as_git_ref_info(item) for item in data["branches"]], + converts=[_format_as_git_ref_info(item) for item in data["converts"]], + tags=[_format_as_git_ref_info(item) for item in data["tags"]], + pull_requests=[_format_as_git_ref_info(item) for item in data["pullRequests"]] + if include_pull_requests + else None, + ) + + @validate_hf_hub_args + def list_repo_commits( + self, + repo_id: str, + *, + repo_type: Optional[str] = None, + token: Union[bool, str, None] = None, + revision: Optional[str] = None, + formatted: bool = False, + ) -> List[GitCommitInfo]: + """ + Get the list of commits of a given revision for a repo on the Hub. + + Commits are sorted by date (last commit first). + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated by a `/`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if listing commits from a dataset or a Space, `None` or `"model"` if + listing from a model. Default is `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + formatted (`bool`): + Whether to return the HTML-formatted title and description of the commits. Defaults to False. + + Example: + ```py + >>> from huggingface_hub import HfApi + >>> api = HfApi() + + # Commits are sorted by date (last commit first) + >>> initial_commit = api.list_repo_commits("gpt2")[-1] + + # Initial commit is always a system commit containing the `.gitattributes` file. + >>> initial_commit + GitCommitInfo( + commit_id='9b865efde13a30c13e0a33e536cf3e4a5a9d71d8', + authors=['system'], + created_at=datetime.datetime(2019, 2, 18, 10, 36, 15, tzinfo=datetime.timezone.utc), + title='initial commit', + message='', + formatted_title=None, + formatted_message=None + ) + + # Create an empty branch by deriving from initial commit + >>> api.create_branch("gpt2", "new_empty_branch", revision=initial_commit.commit_id) + ``` + + Returns: + List[[`GitCommitInfo`]]: list of objects containing information about the commits for a repo on the Hub. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo + does not exist. + [`~utils.RevisionNotFoundError`]: + If revision is not found (error 404) on the repo. + """ + repo_type = repo_type or constants.REPO_TYPE_MODEL + revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION + + # Paginate over results and return the list of commits. 
+        return [
+            GitCommitInfo(
+                commit_id=item["id"],
+                authors=[author["user"] for author in item["authors"]],
+                created_at=parse_datetime(item["date"]),
+                title=item["title"],
+                message=item["message"],
+                formatted_title=item.get("formatted", {}).get("title"),
+                formatted_message=item.get("formatted", {}).get("message"),
+            )
+            for item in paginate(
+                f"{self.endpoint}/api/{repo_type}s/{repo_id}/commits/{revision}",
+                headers=self._build_hf_headers(token=token),
+                params={"expand[]": "formatted"} if formatted else {},
+            )
+        ]
+
+    @validate_hf_hub_args
+    def get_paths_info(
+        self,
+        repo_id: str,
+        paths: Union[List[str], str],
+        *,
+        expand: bool = False,
+        revision: Optional[str] = None,
+        repo_type: Optional[str] = None,
+        token: Union[str, bool, None] = None,
+    ) -> List[Union[RepoFile, RepoFolder]]:
+        """
+        Get information about a repo's paths.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a `/`.
+            paths (`Union[List[str], str]`):
+                The paths to get information about. If a path does not exist, it is ignored without raising
+                an exception.
+            expand (`bool`, *optional*, defaults to `False`):
+                Whether to fetch more information about the paths (e.g. last commit and files' security scan results). This
+                operation is more expensive for the server so only 50 results are returned per page (instead of 1000).
+                As pagination is implemented in `huggingface_hub`, this is transparent for you except for the time it
+                takes to get the results.
+            revision (`str`, *optional*):
+                The revision of the repository from which to get the information. Defaults to `"main"` branch.
+            repo_type (`str`, *optional*):
+                The type of the repository from which to get the information (`"model"`, `"dataset"` or `"space"`).
+                Defaults to `"model"`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `List[Union[RepoFile, RepoFolder]]`:
+                The information about the paths, as a list of [`RepoFile`] and [`RepoFolder`] objects.
+
+        Raises:
+            [`~utils.RepositoryNotFoundError`]:
+                If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo
+                does not exist.
+            [`~utils.RevisionNotFoundError`]:
+                If revision is not found (error 404) on the repo.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import get_paths_info
+        >>> paths_info = get_paths_info("allenai/c4", ["README.md", "en"], repo_type="dataset")
+        >>> paths_info
+        [
+            RepoFile(path='README.md', size=2379, blob_id='f84cb4c97182890fc1dbdeaf1a6a468fd27b4fff', lfs=None, last_commit=None, security=None),
+            RepoFolder(path='en', tree_id='dc943c4c40f53d02b31ced1defa7e5f438d5862e', last_commit=None)
+        ]
+        ```
+        """
+        repo_type = repo_type or constants.REPO_TYPE_MODEL
+        revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
+        headers = self._build_hf_headers(token=token)
+
+        response = get_session().post(
+            f"{self.endpoint}/api/{repo_type}s/{repo_id}/paths-info/{revision}",
+            data={
+                "paths": paths if isinstance(paths, list) else [paths],
+                "expand": expand,
+            },
+            headers=headers,
+        )
+        hf_raise_for_status(response)
+        paths_info = response.json()
+        return [
+            RepoFile(**path_info) if path_info["type"] == "file" else RepoFolder(**path_info)
+            for path_info in paths_info
+        ]
+
+    @validate_hf_hub_args
+    def super_squash_history(
+        self,
+        repo_id: str,
+        *,
+        branch: Optional[str] = None,
+        commit_message: Optional[str] = None,
+        repo_type: Optional[str] = None,
+        token: Union[str, bool, None] = None,
+    ) -> None:
+        """Squash commit history on a branch for a repo on the Hub.
+
+        Squashing the repo history is useful when you know you'll make hundreds of commits and you don't want to
+        clutter the history. Squashing commits can only be performed from the head of a branch.
+
+        > [!WARNING]
+        > Once squashed, the commit history cannot be retrieved. This is a non-revertible operation.
+
+        > [!WARNING]
+        > Once the history of a branch has been squashed, it is not possible to merge it back into another branch since
+        > their history will have diverged.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a `/`.
+            branch (`str`, *optional*):
+                The branch to squash. Defaults to the head of the `"main"` branch.
+            commit_message (`str`, *optional*):
+                The commit message to use for the squashed commit.
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if squashing the history of a dataset or a Space, `None` or `"model"`
+                if squashing the history of a model. Default is `None`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            [`~utils.RepositoryNotFoundError`]:
+                If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo
+                does not exist.
+            [`~utils.RevisionNotFoundError`]:
+                If the branch to squash cannot be found.
+            [`~utils.BadRequestError`]:
+                If the given reference is not a valid branch. You cannot squash history on tags.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import HfApi
+        >>> api = HfApi()
+
+        # Create repo
+        >>> repo_id = api.create_repo("test-squash").repo_id
+
+        # Make a lot of commits.
+        >>> api.upload_file(repo_id=repo_id, path_in_repo="file.txt", path_or_fileobj=b"content")
+        >>> api.upload_file(repo_id=repo_id, path_in_repo="lfs.bin", path_or_fileobj=b"content")
+        >>> api.upload_file(repo_id=repo_id, path_in_repo="file.txt", path_or_fileobj=b"another_content")
+
+        # Squash history
+        >>> api.super_squash_history(repo_id=repo_id)
+        ```
+        """
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError("Invalid repo type")
+        if branch is None:
+            branch = constants.DEFAULT_REVISION
+
+        # Prepare request
+        url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/super-squash/{quote(branch, safe='')}"
+        headers = self._build_hf_headers(token=token)
+        commit_message = commit_message or f"Super-squash branch '{branch}' using huggingface_hub"
+
+        # Super-squash
+        response = get_session().post(url=url, headers=headers, json={"message": commit_message})
+        hf_raise_for_status(response)
+
+    @validate_hf_hub_args
+    def list_lfs_files(
+        self,
+        repo_id: str,
+        *,
+        repo_type: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> Iterable[LFSFileInfo]:
+        """
+        List all LFS files in a repo on the Hub.
+
+        This is primarily useful to count how much storage a repo is using and to eventually clean up large files
+        with [`permanently_delete_lfs_files`]. Note that this would be a permanent action that will affect all commits
+        referencing the deleted files and that cannot be undone.
+
+        Args:
+            repo_id (`str`):
+                The repository for which you are listing LFS files.
+            repo_type (`str`, *optional*):
+                Type of repository. Set to `"dataset"` or `"space"` if listing from a dataset or space, `None` or
+                `"model"` if listing from a model. Default is `None`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[LFSFileInfo]`: An iterator of [`LFSFileInfo`] objects.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import HfApi
+        >>> api = HfApi()
+        >>> lfs_files = api.list_lfs_files("username/my-cool-repo")
+
+        # Filter files to delete based on a combination of `filename`, `pushed_at`, `ref` or `size`.
+        # e.g. select only LFS files in the "checkpoints" folder
+        >>> lfs_files_to_delete = (lfs_file for lfs_file in lfs_files if lfs_file.filename.startswith("checkpoints/"))
+
+        # Permanently delete LFS files
+        >>> api.permanently_delete_lfs_files("username/my-cool-repo", lfs_files_to_delete)
+        ```
+        """
+        # Prepare request
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+        url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/lfs-files"
+        headers = self._build_hf_headers(token=token)
+
+        # Paginate over LFS items
+        for item in paginate(url, params={}, headers=headers):
+            yield LFSFileInfo(**item)
+
+    @validate_hf_hub_args
+    def permanently_delete_lfs_files(
+        self,
+        repo_id: str,
+        lfs_files: Iterable[LFSFileInfo],
+        *,
+        rewrite_history: bool = True,
+        repo_type: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        """
+        Permanently delete LFS files from a repo on the Hub.
+
+        > [!WARNING]
+        > This is a permanent action that will affect all commits referencing the deleted files and might corrupt your
+        > repository. This is a non-revertible operation. Use it only if you know what you are doing.
+
+        Args:
+            repo_id (`str`):
+                The repository from which you are permanently deleting LFS files.
+            lfs_files (`Iterable[LFSFileInfo]`):
+                An iterable of [`LFSFileInfo`] items to permanently delete from the repo. Use [`list_lfs_files`] to list
+                all LFS files from a repo.
+            rewrite_history (`bool`, *optional*, defaults to `True`):
+                Whether to rewrite repository history to remove file pointers referencing the deleted LFS files (recommended).
+            repo_type (`str`, *optional*):
+                Type of repository. Set to `"dataset"` or `"space"` if deleting from a dataset or space, `None` or
+                `"model"` if deleting from a model. Default is `None`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import HfApi
+        >>> api = HfApi()
+        >>> lfs_files = api.list_lfs_files("username/my-cool-repo")
+
+        # Filter files to delete based on a combination of `filename`, `pushed_at`, `ref` or `size`.
+        # e.g. select only LFS files in the "checkpoints" folder
+        >>> lfs_files_to_delete = (lfs_file for lfs_file in lfs_files if lfs_file.filename.startswith("checkpoints/"))
+
+        # Permanently delete LFS files
+        >>> api.permanently_delete_lfs_files("username/my-cool-repo", lfs_files_to_delete)
+        ```
+        """
+        # Prepare request
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+        url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/lfs-files/batch"
+        headers = self._build_hf_headers(token=token)
+
+        # Delete LFS items by batches of 1000
+        for batch in chunk_iterable(lfs_files, 1000):
+            shas = [item.file_oid for item in batch]
+            if len(shas) == 0:
+                return
+            payload = {
+                "deletions": {
+                    "sha": shas,
+                    "rewriteHistory": rewrite_history,
+                }
+            }
+            response = get_session().post(url, headers=headers, json=payload)
+            hf_raise_for_status(response)
+
+    @validate_hf_hub_args
+    def create_repo(
+        self,
+        repo_id: str,
+        *,
+        token: Union[str, bool, None] = None,
+        private: Optional[bool] = None,
+        repo_type: Optional[str] = None,
+        exist_ok: bool = False,
+        resource_group_id: Optional[str] = None,
+        space_sdk: Optional[str] = None,
+        space_hardware: Optional[SpaceHardware] = None,
+        space_storage: Optional[SpaceStorage] = None,
+        space_sleep_time: Optional[int] = None,
+        space_secrets: Optional[List[Dict[str, str]]] = None,
+        space_variables: Optional[List[Dict[str, str]]] = None,
+    ) -> RepoUrl:
+        """Create an empty repo on the Hugging Face Hub.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated
+                by a `/`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+            private (`bool`, *optional*):
+                Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if uploading to a dataset or
+                space, `None` or `"model"` if uploading to a model. Default is
+                `None`.
+            exist_ok (`bool`, *optional*, defaults to `False`):
+                If `True`, do not raise an error if repo already exists.
+            resource_group_id (`str`, *optional*):
+                Resource group in which to create the repo. Resource groups are only available for Enterprise Hub organizations and
+                allow you to define which members of the organization can access the resource. The ID of a resource group
+                can be found in the URL of the resource's page on the Hub (e.g. `"66670e5163145ca562cb1988"`).
+                To learn more about resource groups, see https://huggingface.co/docs/hub/en/security-resource-groups.
+            space_sdk (`str`, *optional*):
+                Choice of SDK to use if repo_type is "space". Can be "streamlit", "gradio", "docker", or "static".
+            space_hardware (`SpaceHardware` or `str`, *optional*):
+                Choice of Hardware if repo_type is "space". See [`SpaceHardware`] for a complete list.
+            space_storage (`SpaceStorage` or `str`, *optional*):
+                Choice of persistent storage tier. Example: `"small"`. See [`SpaceStorage`] for a complete list.
+            space_sleep_time (`int`, *optional*):
+                Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want
+                your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure
+                the sleep time (value is fixed to 48 hours of inactivity).
+                See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details.
+            space_secrets (`List[Dict[str, str]]`, *optional*):
+                A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
+                For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets.
+            space_variables (`List[Dict[str, str]]`, *optional*):
+                A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional.
+                For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables.
+
+        Returns:
+            [`RepoUrl`]: URL to the newly created repo. Value is a subclass of `str` containing
+            attributes like `endpoint`, `repo_type` and `repo_id`.
+        """
+        organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id)
+
+        path = f"{self.endpoint}/api/repos/create"
+
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError("Invalid repo type")
+
+        json: Dict[str, Any] = {"name": name, "organization": organization}
+        if private is not None:
+            json["private"] = private
+        if repo_type is not None:
+            json["type"] = repo_type
+        if repo_type == "space":
+            if space_sdk is None:
+                raise ValueError(
+                    "No space_sdk provided. `create_repo` expects space_sdk to be one"
+                    f" of {constants.SPACES_SDK_TYPES} when repo_type is 'space'"
+                )
+            if space_sdk not in constants.SPACES_SDK_TYPES:
+                raise ValueError(f"Invalid space_sdk. Please choose one of {constants.SPACES_SDK_TYPES}.")
+            json["sdk"] = space_sdk
+
+        if space_sdk is not None and repo_type != "space":
+            warnings.warn("Ignoring provided space_sdk because repo_type is not 'space'.")
+
+        function_args = [
+            "space_hardware",
+            "space_storage",
+            "space_sleep_time",
+            "space_secrets",
+            "space_variables",
+        ]
+        json_keys = ["hardware", "storageTier", "sleepTimeSeconds", "secrets", "variables"]
+        values = [space_hardware, space_storage, space_sleep_time, space_secrets, space_variables]
+
+        if repo_type == "space":
+            json.update({k: v for k, v in zip(json_keys, values) if v is not None})
+        else:
+            provided_space_args = [key for key, value in zip(function_args, values) if value is not None]
+
+            if provided_space_args:
+                warnings.warn(f"Ignoring provided {', '.join(provided_space_args)} because repo_type is not 'space'.")
+
+        if getattr(self, "_lfsmultipartthresh", None):
+            # Testing purposes only.
+            # See https://github.com/huggingface/huggingface_hub/pull/733/files#r820604472
+            json["lfsmultipartthresh"] = self._lfsmultipartthresh  # type: ignore
+
+        if resource_group_id is not None:
+            json["resourceGroupId"] = resource_group_id
+
+        headers = self._build_hf_headers(token=token)
+        while True:
+            r = get_session().post(path, headers=headers, json=json)
+            if r.status_code == 409 and "Cannot create repo: another conflicting operation is in progress" in r.text:
+                # Since https://github.com/huggingface/moon-landing/pull/7272 (private repo), it is not possible to
+                # concurrently create repos on the Hub for the same user. This is rarely an issue, except when running
+                # tests. To avoid any inconvenience, we retry to create the repo for this specific error.
+                # NOTE: This could have been fixed directly in the tests but adding it here should fix CIs for all
+                # dependent libraries.
+                # NOTE: If a fix is implemented server-side, we should be able to remove this retry mechanism.
+                logger.debug("Create repo failed due to a concurrency issue. Retrying...")
+                continue
+            break
+
+        try:
+            hf_raise_for_status(r)
+        except HTTPError as err:
+            if exist_ok and err.response.status_code == 409:
+                # Repo already exists and `exist_ok=True`
+                pass
+            elif exist_ok and err.response.status_code == 403:
+                # No write permission on the namespace but repo might already exist
+                try:
+                    self.repo_info(repo_id=repo_id, repo_type=repo_type, token=token)
+                    if repo_type is None or repo_type == constants.REPO_TYPE_MODEL:
+                        return RepoUrl(f"{self.endpoint}/{repo_id}")
+                    return RepoUrl(f"{self.endpoint}/{repo_type}/{repo_id}")
+                except HfHubHTTPError:
+                    raise err
+            else:
+                raise
+
+        d = r.json()
+        return RepoUrl(d["url"], endpoint=self.endpoint)
+
+    @validate_hf_hub_args
+    def delete_repo(
+        self,
+        repo_id: str,
+        *,
+        token: Union[str, bool, None] = None,
+        repo_type: Optional[str] = None,
+        missing_ok: bool = False,
+    ) -> None:
+        """
+        Delete a repo from the Hugging Face Hub. CAUTION: this is irreversible.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated
+                by a `/`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if deleting a dataset or
+                space, `None` or `"model"` if deleting a model.
+            missing_ok (`bool`, *optional*, defaults to `False`):
+                If `True`, do not raise an error if repo does not exist.
+
+        Raises:
+            [`~utils.RepositoryNotFoundError`]
+                If the repository to delete from cannot be found and `missing_ok` is set to `False` (default).
+        """
+        organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id)
+
+        path = f"{self.endpoint}/api/repos/delete"
+
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError("Invalid repo type")
+
+        json = {"name": name, "organization": organization}
+        if repo_type is not None:
+            json["type"] = repo_type
+
+        headers = self._build_hf_headers(token=token)
+        r = get_session().delete(path, headers=headers, json=json)
+        try:
+            hf_raise_for_status(r)
+        except RepositoryNotFoundError:
+            if not missing_ok:
+                raise
+
+    @_deprecate_method(version="0.32", message="Please use `update_repo_settings` instead.")
+    @validate_hf_hub_args
+    def update_repo_visibility(
+        self,
+        repo_id: str,
+        private: bool = False,
+        *,
+        token: Union[str, bool, None] = None,
+        repo_type: Optional[str] = None,
+    ) -> Dict[str, bool]:
+        """Update the visibility setting of a repository.
+
+        Deprecated. Use `update_repo_settings` instead.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a `/`.
+            private (`bool`, *optional*, defaults to `False`):
+                Whether the repository should be private.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+            repo_type (`str`, *optional*):
+                Set to `"dataset"` or `"space"` if uploading to a dataset or
+                space, `None` or `"model"` if uploading to a model. Default is
+                `None`.
+
+        Returns:
+            The HTTP response in json.
+
+        > [!TIP]
+        > Raises the following errors:
+        >
+        > - [`~utils.RepositoryNotFoundError`]
+        > If the repository to download from cannot be found. This may be because it doesn't exist,
+        > or because it is set to `private` and you do not have access.
+        """
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL  # default repo type
+
+        r = get_session().put(
+            url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings",
+            headers=self._build_hf_headers(token=token),
+            json={"private": private},
+        )
+        hf_raise_for_status(r)
+        return r.json()
+
+    @validate_hf_hub_args
+    def update_repo_settings(
+        self,
+        repo_id: str,
+        *,
+        gated: Optional[Literal["auto", "manual", False]] = None,
+        private: Optional[bool] = None,
+        token: Union[str, bool, None] = None,
+        repo_type: Optional[str] = None,
+        xet_enabled: Optional[bool] = None,
+    ) -> None:
+        """
+        Update the settings of a repository, including gated access and visibility.
+
+        To give more control over how repos are used, the Hub allows repo authors to enable
+        access requests for their repos, and also to set the visibility of the repo to private.
+
+        Args:
+            repo_id (`str`):
+                A namespace (user or an organization) and a repo name separated by a `/`.
+            gated (`Literal["auto", "manual", False]`, *optional*):
+                The gated status for the repository. If set to `None` (default), the `gated` setting of the repository won't be updated.
+ * "auto": The repository is gated, and access requests are automatically approved or denied based on predefined criteria. + * "manual": The repository is gated, and access requests require manual approval. + * False : The repository is not gated, and anyone can access it. + private (`bool`, *optional*): + Whether the repository should be private. + token (`Union[str, bool, None]`, *optional*): + A valid user access token (string). Defaults to the locally saved token, + which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass False. + repo_type (`str`, *optional*): + The type of the repository to update settings from (`"model"`, `"dataset"` or `"space"`). + Defaults to `"model"`. + xet_enabled (`bool`, *optional*): + Whether the repository should be enabled for Xet Storage. + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If gated is not one of "auto", "manual", or False. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If repo_type is not one of the values in constants.REPO_TYPES. + [`~utils.HfHubHTTPError`]: + If the request to the Hugging Face Hub API fails. + [`~utils.RepositoryNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + """ + + if repo_type not in constants.REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL # default repo type + + # Prepare the JSON payload for the PUT request + payload: Dict = {} + + if gated is not None: + if gated not in ["auto", "manual", False]: + raise ValueError(f"Invalid gated status, must be one of 'auto', 'manual', or False. Got '{gated}'.") + payload["gated"] = gated + + if private is not None: + payload["private"] = private + + if xet_enabled is not None: + payload["xetEnabled"] = xet_enabled + + if len(payload) == 0: + raise ValueError("At least one setting must be updated.") + + # Build headers + headers = self._build_hf_headers(token=token) + + r = get_session().put( + url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings", + headers=headers, + json=payload, + ) + hf_raise_for_status(r) + + def move_repo( + self, + from_id: str, + to_id: str, + *, + repo_type: Optional[str] = None, + token: Union[str, bool, None] = None, + ): + """ + Moving a repository from namespace1/repo_name1 to namespace2/repo_name2 + + Note there are certain limitations. For more information about moving + repositories, please see + https://hf.co/docs/hub/repositories-settings#renaming-or-transferring-a-repo. + + Args: + from_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. Original repository identifier. + to_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. Final repository identifier. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if uploading to a dataset or + space, `None` or `"model"` if uploading to a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. 
+ + > [!TIP] + > Raises the following errors: + > + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + if len(from_id.split("/")) != 2: + raise ValueError(f"Invalid repo_id: {from_id}. It should have a namespace (:namespace:/:repo_name:)") + + if len(to_id.split("/")) != 2: + raise ValueError(f"Invalid repo_id: {to_id}. It should have a namespace (:namespace:/:repo_name:)") + + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL # Hub won't accept `None`. + + json = {"fromRepo": from_id, "toRepo": to_id, "type": repo_type} + + path = f"{self.endpoint}/api/repos/move" + headers = self._build_hf_headers(token=token) + r = get_session().post(path, headers=headers, json=json) + try: + hf_raise_for_status(r) + except HfHubHTTPError as e: + e.append_to_message( + "\nFor additional documentation please see" + " https://hf.co/docs/hub/repositories-settings#renaming-or-transferring-a-repo." + ) + raise + + @overload + def create_commit( # type: ignore + self, + repo_id: str, + operations: Iterable[CommitOperation], + *, + commit_message: str, + commit_description: Optional[str] = None, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + num_threads: int = 5, + parent_commit: Optional[str] = None, + run_as_future: Literal[False] = ..., + ) -> CommitInfo: ... + + @overload + def create_commit( + self, + repo_id: str, + operations: Iterable[CommitOperation], + *, + commit_message: str, + commit_description: Optional[str] = None, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + num_threads: int = 5, + parent_commit: Optional[str] = None, + run_as_future: Literal[True] = ..., + ) -> Future[CommitInfo]: ... + + @validate_hf_hub_args + @future_compatible + def create_commit( + self, + repo_id: str, + operations: Iterable[CommitOperation], + *, + commit_message: str, + commit_description: Optional[str] = None, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + num_threads: int = 5, + parent_commit: Optional[str] = None, + run_as_future: bool = False, + ) -> Union[CommitInfo, Future[CommitInfo]]: + """ + Creates a commit in the given repo, deleting & uploading files as needed. + + > [!WARNING] + > The input list of `CommitOperation` will be mutated during the commit process. Do not reuse the same objects + > for multiple commits. + + > [!WARNING] + > `create_commit` assumes that the repo already exists on the Hub. If you get a + > Client error 404, please make sure you are authenticated and that `repo_id` and + > `repo_type` are set correctly. If repo does not exist, create it first using + > [`~hf_api.create_repo`]. + + > [!WARNING] + > `create_commit` is limited to 25k LFS files and a 1GB payload for regular files. 
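A short usage sketch for `move_repo` above; both identifiers must be fully qualified `namespace/name` strings, and the ids here are hypothetical:

```python
from huggingface_hub import HfApi
from huggingface_hub.utils import HfHubHTTPError

api = HfApi()

try:
    api.move_repo(from_id="old-org/my-model", to_id="new-org/my-model")
except HfHubHTTPError as err:
    # On failure, the error message is extended with a pointer to
    # https://hf.co/docs/hub/repositories-settings#renaming-or-transferring-a-repo
    print(err)
```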
+ + Args: + repo_id (`str`): + The repository in which the commit will be created, for example: + `"username/custom_transformers"` + + operations (`Iterable` of [`~hf_api.CommitOperation`]): + An iterable of operations to include in the commit, either: + + - [`~hf_api.CommitOperationAdd`] to upload a file + - [`~hf_api.CommitOperationDelete`] to delete a file + - [`~hf_api.CommitOperationCopy`] to copy a file + + Operation objects will be mutated to include information relative to the upload. Do not reuse the + same objects for multiple commits. + + commit_message (`str`): + The summary (first line) of the commit that will be created. + + commit_description (`str`, *optional*): + The description of the commit that will be created + + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if uploading to a dataset or + space, `None` or `"model"` if uploading to a model. Default is + `None`. + + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request with that commit. Defaults to `False`. + If `revision` is not set, PR is opened against the `"main"` branch. If + `revision` is set and is a branch, PR is opened against this branch. If + `revision` is set and is not a branch name (example: a commit oid), an + `RevisionNotFoundError` is returned by the server. + + num_threads (`int`, *optional*): + Number of concurrent threads for uploading files. Defaults to 5. + Setting it to 2 means at most 2 files will be uploaded concurrently. + + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. + Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, + the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` + is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` + ensures the repo has not changed before committing the changes, and can be especially useful + if the repo is updated / committed to concurrently. + run_as_future (`bool`, *optional*): + Whether or not to run this method in the background. Background jobs are run sequentially without + blocking the main thread. Passing `run_as_future=True` will return a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) + object. Defaults to `False`. + + Returns: + [`CommitInfo`] or `Future`: + Instance of [`CommitInfo`] containing information about the newly created commit (commit hash, commit + url, pr url, commit message,...). If `run_as_future=True` is passed, returns a Future object which will + contain the result when executed. + + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If commit message is empty. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If parent commit is not a valid commit OID. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If a README.md file with an invalid metadata section is committed. In this case, the commit will fail + early, before trying to upload any file. 
+            [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+                If `create_pr` is `True` and revision is neither `None` nor `"main"`.
+            [`~utils.RepositoryNotFoundError`]:
+                If repository is not found (error 404): wrong repo_id/repo_type, private
+                but not authenticated or repo does not exist.
+        """
+        if parent_commit is not None and not constants.REGEX_COMMIT_OID.fullmatch(parent_commit):
+            raise ValueError(
+                f"`parent_commit` is not a valid commit OID. It must match the following regex: {constants.REGEX_COMMIT_OID}"
+            )
+
+        if commit_message is None or len(commit_message) == 0:
+            raise ValueError("`commit_message` can't be empty, please pass a value.")
+
+        commit_description = commit_description if commit_description is not None else ""
+        repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        unquoted_revision = revision or constants.DEFAULT_REVISION
+        revision = quote(unquoted_revision, safe="")
+        create_pr = create_pr if create_pr is not None else False
+
+        headers = self._build_hf_headers(token=token)
+
+        operations = list(operations)
+        additions = [op for op in operations if isinstance(op, CommitOperationAdd)]
+        copies = [op for op in operations if isinstance(op, CommitOperationCopy)]
+        nb_additions = len(additions)
+        nb_copies = len(copies)
+        nb_deletions = len(operations) - nb_additions - nb_copies
+
+        for addition in additions:
+            if addition._is_committed:
+                raise ValueError(
+                    f"CommitOperationAdd {addition} has already been committed and cannot be reused. Please create a"
+                    " new CommitOperationAdd object if you want to create a new commit."
+                )
+
+        if repo_type != "dataset":
+            for addition in additions:
+                if addition.path_in_repo.endswith((".arrow", ".parquet")):
+                    warnings.warn(
+                        f"It seems that you are about to commit a data file ({addition.path_in_repo}) to a {repo_type}"
+                        " repository. Are you sure this is intended? If you are trying to upload a dataset, please"
+                        " set `repo_type='dataset'` or `--repo-type=dataset` in a CLI."
+                    )
+
+        logger.debug(
+            f"About to commit to the hub: {nb_additions} addition(s), {nb_copies} copy(ies) and"
+            f" {nb_deletions} deletion(s)."
+        )
+
+        # If updating a README.md file, make sure the metadata format is valid.
+        # It's better to fail early than to fail after all the files have been uploaded.
+ for addition in additions: + if addition.path_in_repo == "README.md": + with addition.as_file() as file: + content = file.read().decode() + self._validate_yaml(content, repo_type=repo_type, token=token) + # Skip other additions after `README.md` has been processed + break + + # If updating twice the same file or update then delete a file in a single commit + _warn_on_overwriting_operations(operations) + + self.preupload_lfs_files( + repo_id=repo_id, + additions=additions, + token=token, + repo_type=repo_type, + revision=unquoted_revision, # first-class methods take unquoted revision + create_pr=create_pr, + num_threads=num_threads, + free_memory=False, # do not remove `CommitOperationAdd.path_or_fileobj` on LFS files for "normal" users + ) + + files_to_copy = _fetch_files_to_copy( + copies=copies, + repo_type=repo_type, + repo_id=repo_id, + headers=headers, + revision=unquoted_revision, + endpoint=self.endpoint, + ) + # Remove no-op operations (files that have not changed) + operations_without_no_op = [] + for operation in operations: + if ( + isinstance(operation, CommitOperationAdd) + and operation._remote_oid is not None + and operation._remote_oid == operation._local_oid + ): + # File already exists on the Hub and has not changed: we can skip it. + logger.debug(f"Skipping upload for '{operation.path_in_repo}' as the file has not changed.") + continue + if ( + isinstance(operation, CommitOperationCopy) + and operation._dest_oid is not None + and operation._dest_oid == operation._src_oid + ): + # Source and destination files are identical - skip + logger.debug( + f"Skipping copy for '{operation.src_path_in_repo}' -> '{operation.path_in_repo}' as the content of the source file is the same as the destination file." + ) + continue + operations_without_no_op.append(operation) + if len(operations) != len(operations_without_no_op): + logger.info( + f"Removing {len(operations) - len(operations_without_no_op)} file(s) from commit that have not changed." + ) + + # Return early if empty commit + if len(operations_without_no_op) == 0: + logger.warning("No files have been modified since last commit. 
Skipping to prevent empty commit.") + + # Get latest commit info + try: + info = self.repo_info(repo_id=repo_id, repo_type=repo_type, revision=unquoted_revision, token=token) + except RepositoryNotFoundError as e: + e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE) + raise + + # Return commit info based on latest commit + url_prefix = self.endpoint + if repo_type is not None and repo_type != constants.REPO_TYPE_MODEL: + url_prefix = f"{url_prefix}/{repo_type}s" + return CommitInfo( + commit_url=f"{url_prefix}/{repo_id}/commit/{info.sha}", + commit_message=commit_message, + commit_description=commit_description, + oid=info.sha, # type: ignore[arg-type] + ) + + commit_payload = _prepare_commit_payload( + operations=operations, + files_to_copy=files_to_copy, + commit_message=commit_message, + commit_description=commit_description, + parent_commit=parent_commit, + ) + commit_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/commit/{revision}" + + def _payload_as_ndjson() -> Iterable[bytes]: + for item in commit_payload: + yield json.dumps(item).encode() + yield b"\n" + + headers = { + # See https://github.com/huggingface/huggingface_hub/issues/1085#issuecomment-1265208073 + "Content-Type": "application/x-ndjson", + **headers, + } + data = b"".join(_payload_as_ndjson()) + params = {"create_pr": "1"} if create_pr else None + + try: + commit_resp = get_session().post(url=commit_url, headers=headers, data=data, params=params) + hf_raise_for_status(commit_resp, endpoint_name="commit") + except RepositoryNotFoundError as e: + e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE) + raise + except EntryNotFoundError as e: + if nb_deletions > 0 and "A file with this name doesn't exist" in str(e): + e.append_to_message( + "\nMake sure to differentiate file and folder paths in delete" + " operations with a trailing '/' or using `is_folder=True/False`." + ) + raise + + # Mark additions as committed (cannot be reused in another commit) + for addition in additions: + addition._is_committed = True + + commit_data = commit_resp.json() + return CommitInfo( + commit_url=commit_data["commitUrl"], + commit_message=commit_message, + commit_description=commit_description, + oid=commit_data["commitOid"], + pr_url=commit_data["pullRequestUrl"] if create_pr else None, + ) + + def preupload_lfs_files( + self, + repo_id: str, + additions: Iterable[CommitOperationAdd], + *, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + num_threads: int = 5, + free_memory: bool = True, + gitignore_content: Optional[str] = None, + ): + """Pre-upload LFS files to S3 in preparation on a future commit. + + This method is useful if you are generating the files to upload on-the-fly and you don't want to store them + in memory before uploading them all at once. + + > [!WARNING] + > This is a power-user method. You shouldn't need to call it directly to make a normal commit. + > Use [`create_commit`] directly instead. + + > [!WARNING] + > Commit operations will be mutated during the process. In particular, the attached `path_or_fileobj` will be + > removed after the upload to save memory (and replaced by an empty `bytes` object). Do not reuse the same + > objects except to pass them to [`create_commit`]. If you don't want to remove the attached content from the + > commit operation object, pass `free_memory=False`. 
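Pulling the `create_commit` pieces above together, here is a minimal usage sketch (hypothetical repo id, paths and contents; the copy source is assumed to already exist on the Hub). Note that byte-identical re-uploads are dropped as no-ops, as described above, and `run_as_future=True` returns a `concurrent.futures.Future` instead of blocking:

```python
from huggingface_hub import (
    CommitOperationAdd,
    CommitOperationCopy,
    CommitOperationDelete,
    HfApi,
)

api = HfApi()

operations = [
    CommitOperationAdd(path_in_repo="weights.bin", path_or_fileobj="./weights.bin"),
    CommitOperationCopy(src_path_in_repo="config.json", path_in_repo="backup/config.json"),
    CommitOperationDelete(path_in_repo="old_weights.bin"),
]

future = api.create_commit(
    repo_id="user/my-model",  # hypothetical; the repo must already exist
    operations=operations,
    commit_message="Rotate weights",
    run_as_future=True,       # commit happens in a background worker
)
info = future.result()        # blocks until the commit is done
print(info.oid, info.commit_url)
```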
+
+        Args:
+            repo_id (`str`):
+                The repository in which you will commit the files, for example: `"username/custom_transformers"`.
+
+            additions (`Iterable` of [`CommitOperationAdd`]):
+                The list of files to upload. Warning: the objects in this list will be mutated to include information
+                relative to the upload. Do not reuse the same objects for multiple commits.
+
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+            repo_type (`str`, *optional*):
+                The type of repository to upload to (e.g. `"model"` (default), `"dataset"` or `"space"`).
+
+            revision (`str`, *optional*):
+                The git revision to commit from. Defaults to the head of the `"main"` branch.
+
+            create_pr (`boolean`, *optional*):
+                Whether or not you plan to create a Pull Request with that commit. Defaults to `False`.
+
+            num_threads (`int`, *optional*):
+                Number of concurrent threads for uploading files. Defaults to 5.
+                Setting it to 2 means at most 2 files will be uploaded concurrently.
+
+            gitignore_content (`str`, *optional*):
+                The content of the `.gitignore` file to know which files should be ignored. The order of priority
+                is to first check if `gitignore_content` is passed, then check if the `.gitignore` file is present
+                in the list of files to commit and finally default to the `.gitignore` file already hosted on the Hub
+                (if any).
+
+        Example:
+        ```py
+        >>> from huggingface_hub import CommitOperationAdd, preupload_lfs_files, create_commit, create_repo
+
+        >>> repo_id = create_repo("test_preupload").repo_id
+
+        # Generate and preupload LFS files one by one
+        >>> operations = []  # List of all `CommitOperationAdd` objects that will be generated
+        >>> for i in range(5):
+        ...     content = ...  # generate binary content
+        ...     addition = CommitOperationAdd(path_in_repo=f"shard_{i}_of_5.bin", path_or_fileobj=content)
+        ...     preupload_lfs_files(repo_id, additions=[addition])  # upload + free memory
+        ...     operations.append(addition)
+
+        # Create commit
+        >>> create_commit(repo_id, operations=operations, commit_message="Commit all shards")
+        ```
+        """
+        repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION
+        create_pr = create_pr if create_pr is not None else False
+        headers = self._build_hf_headers(token=token)
+
+        # Check if a `gitignore` file is being committed to the Hub.
+ additions = list(additions) + if gitignore_content is None: + for addition in additions: + if addition.path_in_repo == ".gitignore": + with addition.as_file() as f: + gitignore_content = f.read().decode() + break + + # Filter out already uploaded files + new_additions = [addition for addition in additions if not addition._is_uploaded] + + # Check which new files are LFS + # For some items, we might have already fetched the upload mode (in case of upload_large_folder) + additions_no_upload_mode = [addition for addition in new_additions if addition._upload_mode is None] + if len(additions_no_upload_mode) > 0: + try: + _fetch_upload_modes( + additions=additions_no_upload_mode, + repo_type=repo_type, + repo_id=repo_id, + headers=headers, + revision=revision, + endpoint=self.endpoint, + create_pr=create_pr or False, + gitignore_content=gitignore_content, + ) + except RepositoryNotFoundError as e: + e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE) + raise + + # Filter out regular files + new_lfs_additions = [addition for addition in new_additions if addition._upload_mode == "lfs"] + + # Filter out files listed in .gitignore + new_lfs_additions_to_upload = [] + for addition in new_lfs_additions: + if addition._should_ignore: + logger.debug(f"Skipping upload for LFS file '{addition.path_in_repo}' (ignored by gitignore file).") + else: + new_lfs_additions_to_upload.append(addition) + if len(new_lfs_additions) != len(new_lfs_additions_to_upload): + logger.info( + f"Skipped upload for {len(new_lfs_additions) - len(new_lfs_additions_to_upload)} LFS file(s) " + "(ignored by gitignore file)." + ) + # If no LFS files remain to upload, keep previous behavior and log explicitly + if len(new_lfs_additions_to_upload) == 0: + logger.debug("No LFS files to upload.") + return + # Prepare upload parameters + upload_kwargs = { + "additions": new_lfs_additions_to_upload, + "repo_type": repo_type, + "repo_id": repo_id, + "headers": headers, + "endpoint": self.endpoint, + # If `create_pr`, we don't want to check user permission on the revision as users with read permission + # should still be able to create PRs even if they don't have write permission on the target branch of the + # PR (i.e. `revision`). + "revision": revision if not create_pr else None, + } + _upload_files(**upload_kwargs, num_threads=num_threads, create_pr=create_pr) # type: ignore [arg-type] + for addition in new_lfs_additions_to_upload: + addition._is_uploaded = True + if free_memory: + addition.path_or_fileobj = b"" + + @overload + def upload_file( # type: ignore + self, + *, + path_or_fileobj: Union[str, Path, bytes, BinaryIO], + path_in_repo: str, + repo_id: str, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + run_as_future: Literal[False] = ..., + ) -> CommitInfo: ... + + @overload + def upload_file( + self, + *, + path_or_fileobj: Union[str, Path, bytes, BinaryIO], + path_in_repo: str, + repo_id: str, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + run_as_future: Literal[True] = ..., + ) -> Future[CommitInfo]: ... 
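The `gitignore_content` priority chain above can be exercised directly; a hedged sketch with a hypothetical repo id, in which any LFS file under `tmp/` is skipped at pre-upload time:

```python
from huggingface_hub import CommitOperationAdd, HfApi

api = HfApi()

addition = CommitOperationAdd(
    path_in_repo="tmp/scratch.bin",
    path_or_fileobj=b"\x00" * 1024,  # placeholder binary content
)

# An explicit gitignore_content takes priority over a committed .gitignore and
# over one already hosted on the Hub. Matching LFS files are logged and skipped.
api.preupload_lfs_files(
    "user/my-model",            # hypothetical repo id
    additions=[addition],
    gitignore_content="tmp/\n",
)
```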
+ + @validate_hf_hub_args + @future_compatible + def upload_file( + self, + *, + path_or_fileobj: Union[str, Path, bytes, BinaryIO], + path_in_repo: str, + repo_id: str, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + run_as_future: bool = False, + ) -> Union[CommitInfo, Future[CommitInfo]]: + """ + Upload a local file (up to 50 GB) to the given repo. The upload is done + through a HTTP post request, and doesn't require git or git-lfs to be + installed. + + Args: + path_or_fileobj (`str`, `Path`, `bytes`, or `IO`): + Path to a file on the local machine or binary data stream / + fileobj / buffer. + path_in_repo (`str`): + Relative filepath in the repo, for example: + `"checkpoints/1fec34a/weights.bin"` + repo_id (`str`): + The repository to which the file will be uploaded, for example: + `"username/custom_transformers"` + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if uploading to a dataset or + space, `None` or `"model"` if uploading to a model. Default is + `None`. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + commit_message (`str`, *optional*): + The summary / title / first line of the generated commit + commit_description (`str` *optional*) + The description of the generated commit + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request with that commit. Defaults to `False`. + If `revision` is not set, PR is opened against the `"main"` branch. If + `revision` is set and is a branch, PR is opened against this branch. If + `revision` is set and is not a branch name (example: a commit oid), an + `RevisionNotFoundError` is returned by the server. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. + run_as_future (`bool`, *optional*): + Whether or not to run this method in the background. Background jobs are run sequentially without + blocking the main thread. Passing `run_as_future=True` will return a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) + object. Defaults to `False`. + + + Returns: + [`CommitInfo`] or `Future`: + Instance of [`CommitInfo`] containing information about the newly created commit (commit hash, commit + url, pr url, commit message,...). If `run_as_future=True` is passed, returns a Future object which will + contain the result when executed. 
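Since `upload_file` returns a [`CommitInfo`], the `create_pr` flow just described can be inspected programmatically; a minimal sketch with a hypothetical repo id:

```python
from huggingface_hub import HfApi

api = HfApi()

commit = api.upload_file(
    path_or_fileobj=b"{}",    # in-memory content works as well as a file path
    path_in_repo="config.json",
    repo_id="user/my-model",  # hypothetical; the repo must already exist
    create_pr=True,
)
print(commit.pr_url)      # URL of the opened pull request (refs/pr/N)
print(commit.commit_url)  # URL of the commit backing the PR
```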
+        > [!TIP]
+        > Raises the following errors:
+        >
+        > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+        >   if the HuggingFace API returned an error
+        > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+        >   if some parameter value is invalid
+        > - [`~utils.RepositoryNotFoundError`]
+        >   If the repository to download from cannot be found. This may be because it doesn't exist,
+        >   or because it is set to `private` and you do not have access.
+        > - [`~utils.RevisionNotFoundError`]
+        >   If the revision to download from cannot be found.
+
+        > [!WARNING]
+        > `upload_file` assumes that the repo already exists on the Hub. If you get a
+        > Client error 404, please make sure you are authenticated and that `repo_id` and
+        > `repo_type` are set correctly. If repo does not exist, create it first using
+        > [`~hf_api.create_repo`].
+
+        Example:
+
+        ```python
+        >>> from huggingface_hub import upload_file
+
+        >>> with open("./local/filepath", "rb") as fobj:
+        ...     upload_file(
+        ...         path_or_fileobj=fobj,
+        ...         path_in_repo="remote/file/path.h5",
+        ...         repo_id="username/my-dataset",
+        ...         repo_type="dataset",
+        ...         token="my_token",
+        ...     )
+        "https://huggingface.co/datasets/username/my-dataset/blob/main/remote/file/path.h5"
+
+        >>> upload_file(
+        ...     path_or_fileobj=".\\\\local\\\\file\\\\path",
+        ...     path_in_repo="remote/file/path.h5",
+        ...     repo_id="username/my-model",
+        ...     token="my_token",
+        ... )
+        "https://huggingface.co/username/my-model/blob/main/remote/file/path.h5"
+
+        >>> upload_file(
+        ...     path_or_fileobj=".\\\\local\\\\file\\\\path",
+        ...     path_in_repo="remote/file/path.h5",
+        ...     repo_id="username/my-model",
+        ...     token="my_token",
+        ...     create_pr=True,
+        ... )
+        "https://huggingface.co/username/my-model/blob/refs%2Fpr%2F1/remote/file/path.h5"
+        ```
+        """
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+
+        commit_message = (
+            commit_message if commit_message is not None else f"Upload {path_in_repo} with huggingface_hub"
+        )
+        operation = CommitOperationAdd(
+            path_or_fileobj=path_or_fileobj,
+            path_in_repo=path_in_repo,
+        )
+
+        commit_info = self.create_commit(
+            repo_id=repo_id,
+            repo_type=repo_type,
+            operations=[operation],
+            commit_message=commit_message,
+            commit_description=commit_description,
+            token=token,
+            revision=revision,
+            create_pr=create_pr,
+            parent_commit=parent_commit,
+        )
+
+        if commit_info.pr_url is not None:
+            revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
+        if repo_type in constants.REPO_TYPES_URL_PREFIXES:
+            repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
+        revision = revision if revision is not None else constants.DEFAULT_REVISION
+
+        return CommitInfo(
+            commit_url=commit_info.commit_url,
+            commit_message=commit_info.commit_message,
+            commit_description=commit_info.commit_description,
+            oid=commit_info.oid,
+            pr_url=commit_info.pr_url,
+            # Similar to `hf_hub_url` but it's "blob" instead of "resolve"
+            # TODO: remove this in v1.0
+            _url=f"{self.endpoint}/{repo_id}/blob/{revision}/{path_in_repo}",
+        )
+
+    @overload
+    def upload_folder(  # type: ignore
+        self,
+        *,
+        repo_id: str,
+        folder_path: Union[str, Path],
+        path_in_repo: Optional[str] = None,
+        commit_message: Optional[str] = None,
+        commit_description: Optional[str] = None,
+        token: Union[str, bool, None] = None,
+        repo_type: Optional[str] = None,
+        revision: Optional[str] = None,
+        create_pr: Optional[bool] = None,
+
parent_commit: Optional[str] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + delete_patterns: Optional[Union[List[str], str]] = None, + run_as_future: Literal[False] = ..., + ) -> CommitInfo: ... + + @overload + def upload_folder( # type: ignore + self, + *, + repo_id: str, + folder_path: Union[str, Path], + path_in_repo: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + delete_patterns: Optional[Union[List[str], str]] = None, + run_as_future: Literal[True] = ..., + ) -> Future[CommitInfo]: ... + + @validate_hf_hub_args + @future_compatible + def upload_folder( + self, + *, + repo_id: str, + folder_path: Union[str, Path], + path_in_repo: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + delete_patterns: Optional[Union[List[str], str]] = None, + run_as_future: bool = False, + ) -> Union[CommitInfo, Future[CommitInfo]]: + """ + Upload a local folder to the given repo. The upload is done through a HTTP requests, and doesn't require git or + git-lfs to be installed. + + The structure of the folder will be preserved. Files with the same name already present in the repository will + be overwritten. Others will be left untouched. + + Use the `allow_patterns` and `ignore_patterns` arguments to specify which files to upload. These parameters + accept either a single pattern or a list of patterns. Patterns are Standard Wildcards (globbing patterns) as + documented [here](https://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm). If both `allow_patterns` and + `ignore_patterns` are provided, both constraints apply. By default, all files from the folder are uploaded. + + Use the `delete_patterns` argument to specify remote files you want to delete. Input type is the same as for + `allow_patterns` (see above). If `path_in_repo` is also provided, the patterns are matched against paths + relative to this folder. For example, `upload_folder(..., path_in_repo="experiment", delete_patterns="logs/*")` + will delete any remote file under `./experiment/logs/`. Note that the `.gitattributes` file will not be deleted + even if it matches the patterns. + + Any `.git/` folder present in any subdirectory will be ignored. However, please be aware that the `.gitignore` + file is not taken into account. + + Uses `HfApi.create_commit` under the hood. + + Args: + repo_id (`str`): + The repository to which the file will be uploaded, for example: + `"username/custom_transformers"` + folder_path (`str` or `Path`): + Path to the folder to upload on the local file system + path_in_repo (`str`, *optional*): + Relative path of the directory in the repo, for example: + `"checkpoints/1fec34a/results"`. Will default to the root folder of the repository. + token (`bool` or `str`, *optional*): + A valid user access token (string). 
Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if uploading to a dataset or + space, `None` or `"model"` if uploading to a model. Default is + `None`. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + commit_message (`str`, *optional*): + The summary / title / first line of the generated commit. Defaults to: + `f"Upload {path_in_repo} with huggingface_hub"` + commit_description (`str` *optional*): + The description of the generated commit + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not + set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened + against this branch. If `revision` is set and is not a branch name (example: a commit oid), an + `RevisionNotFoundError` is returned by the server. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are uploaded. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not uploaded. + delete_patterns (`List[str]` or `str`, *optional*): + If provided, remote files matching any of the patterns will be deleted from the repo while committing + new files. This is useful if you don't know which files have already been uploaded. + Note: to avoid discrepancies the `.gitattributes` file is not deleted even if it matches the pattern. + run_as_future (`bool`, *optional*): + Whether or not to run this method in the background. Background jobs are run sequentially without + blocking the main thread. Passing `run_as_future=True` will return a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) + object. Defaults to `False`. + + Returns: + [`CommitInfo`] or `Future`: + Instance of [`CommitInfo`] containing information about the newly created commit (commit hash, commit + url, pr url, commit message,...). If `run_as_future=True` is passed, returns a Future object which will + contain the result when executed. + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + + > [!WARNING] + > `upload_folder` assumes that the repo already exists on the Hub. If you get a Client error 404, please make + > sure you are authenticated and that `repo_id` and `repo_type` are set correctly. If repo does not exist, create + > it first using [`~hf_api.create_repo`]. 
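As a concrete illustration of the `delete_patterns` semantics described above (patterns are matched relative to `path_in_repo`), here is a hedged sketch with hypothetical paths, complementing the library's own examples below:

```python
from huggingface_hub import HfApi

api = HfApi()

# Deletes any remote file under experiment/logs/ before committing the new
# contents of local/experiment; .gitattributes is never deleted.
api.upload_folder(
    folder_path="local/experiment",  # hypothetical local folder
    path_in_repo="experiment",
    repo_id="user/my-model",         # hypothetical repo id
    delete_patterns="logs/*",
    ignore_patterns=["*.tmp"],       # local files to skip during upload
)
```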
+
+        > [!TIP]
+        > When dealing with a large folder (thousands of files or hundreds of GB), we recommend using [`~hf_api.upload_large_folder`] instead.
+
+        Example:
+
+        ```python
+        # Upload checkpoints folder except the log files
+        >>> upload_folder(
+        ...     folder_path="local/checkpoints",
+        ...     path_in_repo="remote/experiment/checkpoints",
+        ...     repo_id="username/my-dataset",
+        ...     repo_type="dataset",
+        ...     token="my_token",
+        ...     ignore_patterns="**/logs/*.txt",
+        ... )
+        # "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints"
+
+        # Upload checkpoints folder including logs while deleting existing logs from the repo
+        # Useful if you don't know exactly which log files have already been pushed
+        >>> upload_folder(
+        ...     folder_path="local/checkpoints",
+        ...     path_in_repo="remote/experiment/checkpoints",
+        ...     repo_id="username/my-dataset",
+        ...     repo_type="dataset",
+        ...     token="my_token",
+        ...     delete_patterns="**/logs/*.txt",
+        ... )
+        "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints"
+
+        # Upload checkpoints folder while creating a PR
+        >>> upload_folder(
+        ...     folder_path="local/checkpoints",
+        ...     path_in_repo="remote/experiment/checkpoints",
+        ...     repo_id="username/my-dataset",
+        ...     repo_type="dataset",
+        ...     token="my_token",
+        ...     create_pr=True,
+        ... )
+        "https://huggingface.co/datasets/username/my-dataset/tree/refs%2Fpr%2F1/remote/experiment/checkpoints"
+
+        ```
+        """
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+
+        # By default, upload folder to the root directory in repo.
+        if path_in_repo is None:
+            path_in_repo = ""
+
+        # Do not upload .git folder
+        if ignore_patterns is None:
+            ignore_patterns = []
+        elif isinstance(ignore_patterns, str):
+            ignore_patterns = [ignore_patterns]
+        ignore_patterns += DEFAULT_IGNORE_PATTERNS
+
+        delete_operations = self._prepare_folder_deletions(
+            repo_id=repo_id,
+            repo_type=repo_type,
+            revision=constants.DEFAULT_REVISION if create_pr else revision,
+            token=token,
+            path_in_repo=path_in_repo,
+            delete_patterns=delete_patterns,
+        )
+        add_operations = self._prepare_upload_folder_additions(
+            folder_path,
+            path_in_repo,
+            allow_patterns=allow_patterns,
+            ignore_patterns=ignore_patterns,
+            token=token,
+            repo_type=repo_type,
+        )
+
+        # Optimize operations: if some files will be overwritten, we don't need to delete them first
+        if len(add_operations) > 0:
+            added_paths = set(op.path_in_repo for op in add_operations)
+            delete_operations = [
+                delete_op for delete_op in delete_operations if delete_op.path_in_repo not in added_paths
+            ]
+        commit_operations = delete_operations + add_operations
+
+        commit_message = commit_message or "Upload folder using huggingface_hub"
+
+        commit_info = self.create_commit(
+            repo_type=repo_type,
+            repo_id=repo_id,
+            operations=commit_operations,
+            commit_message=commit_message,
+            commit_description=commit_description,
+            token=token,
+            revision=revision,
+            create_pr=create_pr,
+            parent_commit=parent_commit,
+        )
+
+        # Create url to uploaded folder (for legacy return value)
+        if create_pr and commit_info.pr_url is not None:
+            revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="")
+        if repo_type in constants.REPO_TYPES_URL_PREFIXES:
+            repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
+        revision = revision if revision is not None else constants.DEFAULT_REVISION
+
+        return CommitInfo(
+            commit_url=commit_info.commit_url,
+
commit_message=commit_info.commit_message, + commit_description=commit_info.commit_description, + oid=commit_info.oid, + pr_url=commit_info.pr_url, + # Similar to `hf_hub_url` but it's "tree" instead of "resolve" + # TODO: remove this in v1.0 + _url=f"{self.endpoint}/{repo_id}/tree/{revision}/{path_in_repo}", + ) + + @validate_hf_hub_args + def delete_file( + self, + path_in_repo: str, + repo_id: str, + *, + token: Union[str, bool, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + ) -> CommitInfo: + """ + Deletes a file in the given repo. + + Args: + path_in_repo (`str`): + Relative filepath in the repo, for example: + `"checkpoints/1fec34a/weights.bin"` + repo_id (`str`): + The repository from which the file will be deleted, for example: + `"username/custom_transformers"` + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the file is in a dataset or + space, `None` or `"model"` if in a model. Default is `None`. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + commit_message (`str`, *optional*): + The summary / title / first line of the generated commit. Defaults to + `f"Delete {path_in_repo} with huggingface_hub"`. + commit_description (`str` *optional*) + The description of the generated commit + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request with that commit. Defaults to `False`. + If `revision` is not set, PR is opened against the `"main"` branch. If + `revision` is set and is a branch, PR is opened against this branch. If + `revision` is set and is not a branch name (example: a commit oid), an + `RevisionNotFoundError` is returned by the server. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. + + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + > - [`~utils.RevisionNotFoundError`] + > If the revision to download from cannot be found. + > - [`~utils.EntryNotFoundError`] + > If the file to download cannot be found. 
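A combined usage sketch for `delete_file` and the batch variant `delete_files` defined next; all repo ids and paths here are hypothetical:

```python
from huggingface_hub import HfApi

api = HfApi()

# Single file: one commit per call.
api.delete_file(
    path_in_repo="checkpoints/1fec34a/weights.bin",
    repo_id="user/custom_transformers",
)

# Batch: exact paths, folder paths and Unix-style wildcards in one commit.
api.delete_files(
    repo_id="user/my-dataset",
    delete_patterns=["stale.txt", "old-checkpoints/", "data/*.parquet"],
    repo_type="dataset",
)
```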
+ + """ + commit_message = ( + commit_message if commit_message is not None else f"Delete {path_in_repo} with huggingface_hub" + ) + + operations = [CommitOperationDelete(path_in_repo=path_in_repo)] + + return self.create_commit( + repo_id=repo_id, + repo_type=repo_type, + token=token, + operations=operations, + revision=revision, + commit_message=commit_message, + commit_description=commit_description, + create_pr=create_pr, + parent_commit=parent_commit, + ) + + @validate_hf_hub_args + def delete_files( + self, + repo_id: str, + delete_patterns: List[str], + *, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + ) -> CommitInfo: + """ + Delete files from a repository on the Hub. + + If a folder path is provided, the entire folder is deleted as well as + all files it contained. + + Args: + repo_id (`str`): + The repository from which the folder will be deleted, for example: + `"username/custom_transformers"` + delete_patterns (`List[str]`): + List of files or folders to delete. Each string can either be + a file path, a folder path or a Unix shell-style wildcard. + E.g. `["file.txt", "folder/", "data/*.parquet"]` + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + to the stored token. + repo_type (`str`, *optional*): + Type of the repo to delete files from. Can be `"model"`, + `"dataset"` or `"space"`. Defaults to `"model"`. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + commit_message (`str`, *optional*): + The summary (first line) of the generated commit. Defaults to + `f"Delete files using huggingface_hub"`. + commit_description (`str` *optional*) + The description of the generated commit. + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request with that commit. Defaults to `False`. + If `revision` is not set, PR is opened against the `"main"` branch. If + `revision` is set and is a branch, PR is opened against this branch. If + `revision` is set and is not a branch name (example: a commit oid), an + `RevisionNotFoundError` is returned by the server. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. 
+ """ + operations = self._prepare_folder_deletions( + repo_id=repo_id, repo_type=repo_type, delete_patterns=delete_patterns, path_in_repo="", revision=revision + ) + + if commit_message is None: + commit_message = f"Delete files {' '.join(delete_patterns)} with huggingface_hub" + + return self.create_commit( + repo_id=repo_id, + repo_type=repo_type, + token=token, + operations=operations, + revision=revision, + commit_message=commit_message, + commit_description=commit_description, + create_pr=create_pr, + parent_commit=parent_commit, + ) + + @validate_hf_hub_args + def delete_folder( + self, + path_in_repo: str, + repo_id: str, + *, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + ) -> CommitInfo: + """ + Deletes a folder in the given repo. + + Simple wrapper around [`create_commit`] method. + + Args: + path_in_repo (`str`): + Relative folder path in the repo, for example: `"checkpoints/1fec34a"`. + repo_id (`str`): + The repository from which the folder will be deleted, for example: + `"username/custom_transformers"` + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + to the stored token. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the folder is in a dataset or + space, `None` or `"model"` if in a model. Default is `None`. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + commit_message (`str`, *optional*): + The summary / title / first line of the generated commit. Defaults to + `f"Delete folder {path_in_repo} with huggingface_hub"`. + commit_description (`str` *optional*) + The description of the generated commit. + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request with that commit. Defaults to `False`. + If `revision` is not set, PR is opened against the `"main"` branch. If + `revision` is set and is a branch, PR is opened against this branch. If + `revision` is set and is not a branch name (example: a commit oid), an + `RevisionNotFoundError` is returned by the server. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. 
+ """ + return self.create_commit( + repo_id=repo_id, + repo_type=repo_type, + token=token, + operations=[CommitOperationDelete(path_in_repo=path_in_repo, is_folder=True)], + revision=revision, + commit_message=( + commit_message if commit_message is not None else f"Delete folder {path_in_repo} with huggingface_hub" + ), + commit_description=commit_description, + create_pr=create_pr, + parent_commit=parent_commit, + ) + + def upload_large_folder( + self, + repo_id: str, + folder_path: Union[str, Path], + *, + repo_type: str, # Repo type is required! + revision: Optional[str] = None, + private: Optional[bool] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + num_workers: Optional[int] = None, + print_report: bool = True, + print_report_every: int = 60, + ) -> None: + """Upload a large folder to the Hub in the most resilient way possible. + + Several workers are started to upload files in an optimized way. Before being committed to a repo, files must be + hashed and be pre-uploaded if they are LFS files. Workers will perform these tasks for each file in the folder. + At each step, some metadata information about the upload process is saved in the folder under `.cache/.huggingface/` + to be able to resume the process if interrupted. The whole process might result in several commits. + + Args: + repo_id (`str`): + The repository to which the file will be uploaded. + E.g. `"HuggingFaceTB/smollm-corpus"`. + folder_path (`str` or `Path`): + Path to the folder to upload on the local file system. + repo_type (`str`): + Type of the repository. Must be one of `"model"`, `"dataset"` or `"space"`. + Unlike in all other `HfApi` methods, `repo_type` is explicitly required here. This is to avoid + any mistake when uploading a large folder to the Hub, and therefore prevent from having to re-upload + everything. + revision (`str`, `optional`): + The branch to commit to. If not provided, the `main` branch will be used. + private (`bool`, `optional`): + Whether the repository should be private. + If `None` (default), the repo will be public unless the organization's default is private. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are uploaded. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not uploaded. + num_workers (`int`, *optional*): + Number of workers to start. Defaults to `os.cpu_count() - 2` (minimum 2). + A higher number of workers may speed up the process if your machine allows it. However, on machines with a + slower connection, it is recommended to keep the number of workers low to ensure better resumability. + Indeed, partially uploaded files will have to be completely re-uploaded if the process is interrupted. + print_report (`bool`, *optional*): + Whether to print a report of the upload progress. Defaults to True. + Report is printed to `sys.stdout` every X seconds (60 by defaults) and overwrites the previous report. + print_report_every (`int`, *optional*): + Frequency at which the report is printed. Defaults to 60 seconds. + + > [!TIP] + > A few things to keep in mind: + > - Repository limits still apply: https://huggingface.co/docs/hub/repositories-recommendations + > - Do not start several processes in parallel. + > - You can interrupt and resume the process at any time. + > - Do not upload the same folder to several repositories. 
If you need to do so, you must delete the local `.cache/.huggingface/` folder first. + + > [!WARNING] + > While being much more robust to upload large folders, `upload_large_folder` is more limited than [`upload_folder`] feature-wise. In practice: + > - you cannot set a custom `path_in_repo`. If you want to upload to a subfolder, you need to set the proper structure locally. + > - you cannot set a custom `commit_message` and `commit_description` since multiple commits are created. + > - you cannot delete from the repo while uploading. Please make a separate commit first. + > - you cannot create a PR directly. Please create a PR first (from the UI or using [`create_pull_request`]) and then commit to it by passing `revision`. + + **Technical details:** + + `upload_large_folder` process is as follow: + 1. (Check parameters and setup.) + 2. Create repo if missing. + 3. List local files to upload. + 4. Run validation checks and display warnings if repository limits might be exceeded: + - Warns if the total number of files exceeds 100k (recommended limit). + - Warns if any folder contains more than 10k files (recommended limit). + - Warns about files larger than 20GB (recommended) or 50GB (hard limit). + 5. Start workers. Workers can perform the following tasks: + - Hash a file. + - Get upload mode (regular or LFS) for a list of files. + - Pre-upload an LFS file. + - Commit a bunch of files. + Once a worker finishes a task, it will move on to the next task based on the priority list (see below) until + all files are uploaded and committed. + 6. While workers are up, regularly print a report to sys.stdout. + + Order of priority: + 1. Commit if more than 5 minutes since last commit attempt (and at least 1 file). + 2. Commit if at least 150 files are ready to commit. + 3. Get upload mode if at least 10 files have been hashed. + 4. Pre-upload LFS file if at least 1 file and no worker is pre-uploading. + 5. Hash file if at least 1 file and no worker is hashing. + 6. Get upload mode if at least 1 file and no worker is getting upload mode. + 7. Pre-upload LFS file if at least 1 file (exception: if hf_transfer is enabled, only 1 worker can preupload LFS at a time). + 8. Hash file if at least 1 file to hash. + 9. Get upload mode if at least 1 file to get upload mode. + 10. Commit if at least 1 file to commit and at least 1 min since last commit attempt. + 11. Commit if at least 1 file to commit and all other queues are empty. + + Special rules: + - If `hf_transfer` is enabled, only 1 LFS uploader at a time. Otherwise the CPU would be bloated by `hf_transfer`. + - Only one worker can commit at a time. + - If no tasks are available, the worker waits for 10 seconds before checking again. + """ + return upload_large_folder_internal( + self, + repo_id=repo_id, + folder_path=folder_path, + repo_type=repo_type, + revision=revision, + private=private, + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + num_workers=num_workers, + print_report=print_report, + print_report_every=print_report_every, + ) + + @validate_hf_hub_args + def get_hf_file_metadata( + self, + *, + url: str, + token: Union[bool, str, None] = None, + proxies: Optional[Dict] = None, + timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT, + ) -> HfFileMetadata: + """Fetch metadata of a file versioned on the Hub for a given url. + + Args: + url (`str`): + File url, for example returned by [`hf_hub_url`]. + token (`bool` or `str`, *optional*): + A valid user access token (string). 
Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + proxies (`dict`, *optional*): + Dictionary mapping protocol to the URL of the proxy passed to `requests.request`. + timeout (`float`, *optional*, defaults to 10): + How many seconds to wait for the server to send metadata before giving up. + + Returns: + A [`HfFileMetadata`] object containing metadata such as location, etag, size and commit_hash. + """ + if token is None: + # Cannot do `token = token or self.token` as token can be `False`. + token = self.token + + return get_hf_file_metadata( + url=url, + token=token, + proxies=proxies, + timeout=timeout, + library_name=self.library_name, + library_version=self.library_version, + user_agent=self.user_agent, + endpoint=self.endpoint, + ) + + @validate_hf_hub_args + def hf_hub_download( + self, + repo_id: str, + filename: str, + *, + subfolder: Optional[str] = None, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + cache_dir: Union[str, Path, None] = None, + local_dir: Union[str, Path, None] = None, + force_download: bool = False, + proxies: Optional[Dict] = None, + etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT, + token: Union[bool, str, None] = None, + local_files_only: bool = False, + # Deprecated args + resume_download: Optional[bool] = None, + force_filename: Optional[str] = None, + local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto", + ) -> str: + """Download a given file if it's not already present in the local cache. + + The new cache file layout looks like this: + - The cache directory contains one subfolder per repo_id (namespaced by repo type) + - inside each repo folder: + - refs is a list of the latest known revision => commit_hash pairs + - blobs contains the actual file blobs (identified by their git-sha or sha256, depending on + whether they're LFS files or not) + - snapshots contains one subfolder per commit, each "commit" contains the subset of the files + that have been resolved at that particular commit. Each filename is a symlink to the blob + at that particular commit. + + ``` + [ 96] . + └── [ 160] models--julien-c--EsperBERTo-small + ├── [ 160] blobs + │ ├── [321M] 403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + │ ├── [ 398] 7cb18dc9bafbfcf74629a4b760af1b160957a83e + │ └── [1.4K] d7edf6bd2a681fb0175f7735299831ee1b22b812 + ├── [ 96] refs + │ └── [ 40] main + └── [ 128] snapshots + ├── [ 128] 2439f60ef33a0d46d85da5001d52aeda5b00ce9f + │ ├── [ 52] README.md -> ../../blobs/d7edf6bd2a681fb0175f7735299831ee1b22b812 + │ └── [ 76] pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + └── [ 128] bbc77c8132af1cc5cf678da3f1ddf2de43606d48 + ├── [ 52] README.md -> ../../blobs/7cb18dc9bafbfcf74629a4b760af1b160957a83e + └── [ 76] pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd + ``` + + If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this + option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir` + to store some metadata related to the downloaded files. While this mechanism is not as robust as the main + cache-system, it's optimized for regularly pulling the latest version of a repository. 
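A small sketch of `get_hf_file_metadata` above, reusing the `julien-c/EsperBERTo-small` repo from the cache-layout listing; the printed fields mirror the [`HfFileMetadata`] attributes:

```python
from huggingface_hub import HfApi, hf_hub_url

api = HfApi()

# Resolve the file URL first, then fetch its metadata without downloading the payload.
url = hf_hub_url(repo_id="julien-c/EsperBERTo-small", filename="pytorch_model.bin")
meta = api.get_hf_file_metadata(url=url)

print(meta.commit_hash)  # commit the URL resolved to
print(meta.etag)         # identifies the blob content
print(meta.size)         # size in bytes
print(meta.location)     # final URL after redirections
```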
+ + Args: + repo_id (`str`): + A user or an organization name and a repo name separated by a `/`. + filename (`str`): + The name of the file in the repo. + subfolder (`str`, *optional*): + An optional value corresponding to a folder inside the repository. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if downloading from a dataset or space, + `None` or `"model"` if downloading from a model. Default is `None`. + revision (`str`, *optional*): + An optional Git revision id which can be a branch name, a tag, or a + commit hash. + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + local_dir (`str` or `Path`, *optional*): + If provided, the downloaded file will be placed under this directory. + force_download (`bool`, *optional*, defaults to `False`): + Whether the file should be downloaded even if it already exists in + the local cache. + proxies (`dict`, *optional*): + Dictionary mapping protocol to the URL of the proxy passed to + `requests.request`. + etag_timeout (`float`, *optional*, defaults to `10`): + When fetching ETag, how many seconds to wait for the server to send + data before giving up, which is passed to `requests.request`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + local_files_only (`bool`, *optional*, defaults to `False`): + If `True`, avoid downloading the file and return the path to the + local cached file if it exists. + + Returns: + `str`: Local path of the file, or, if networking is off, the last version of the file cached on disk. + + Raises: + [`~utils.RepositoryNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + [`~utils.RevisionNotFoundError`] + If the revision to download from cannot be found. + [`~utils.EntryNotFoundError`] + If the file to download cannot be found. + [`~utils.LocalEntryNotFoundError`] + If network is disabled or unavailable and file is not found in cache. + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + If `token=True` but the token cannot be found. + [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) + If ETag cannot be determined. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If some parameter value is invalid. + """ + from .file_download import hf_hub_download + + if token is None: + # Cannot do `token = token or self.token` as token can be `False`.
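+ # (`False` explicitly disables authentication, so only `None` falls back to the client-level token.)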
+ token = self.token + + return hf_hub_download( + repo_id=repo_id, + filename=filename, + subfolder=subfolder, + repo_type=repo_type, + revision=revision, + endpoint=self.endpoint, + library_name=self.library_name, + library_version=self.library_version, + cache_dir=cache_dir, + local_dir=local_dir, + local_dir_use_symlinks=local_dir_use_symlinks, + user_agent=self.user_agent, + force_download=force_download, + force_filename=force_filename, + proxies=proxies, + etag_timeout=etag_timeout, + resume_download=resume_download, + token=token, + headers=self.headers, + local_files_only=local_files_only, + ) + + @validate_hf_hub_args + def snapshot_download( + self, + repo_id: str, + *, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + cache_dir: Union[str, Path, None] = None, + local_dir: Union[str, Path, None] = None, + proxies: Optional[Dict] = None, + etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT, + force_download: bool = False, + token: Union[bool, str, None] = None, + local_files_only: bool = False, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + max_workers: int = 8, + tqdm_class: Optional[Type[base_tqdm]] = None, + # Deprecated args + local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto", + resume_download: Optional[bool] = None, + ) -> str: + """Download repo files. + + Download a whole snapshot of a repo's files at the specified revision. This is useful when you want all files from + a repo, because you don't know which ones you will need a priori. All files are nested inside a folder in order + to keep their actual filename relative to that folder. You can also filter which files to download using + `allow_patterns` and `ignore_patterns`. + + If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this + option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir` + to store some metadata related to the downloaded files. While this mechanism is not as robust as the main + cache-system, it's optimized for regularly pulling the latest version of a repository. + + An alternative would be to clone the repo, but this requires git and git-lfs to be installed and properly + configured. It is also not possible to filter which files to download when cloning a repository using git. + + Args: + repo_id (`str`): + A user or an organization name and a repo name separated by a `/`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if downloading from a dataset or space, + `None` or `"model"` if downloading from a model. Default is `None`. + revision (`str`, *optional*): + An optional Git revision id which can be a branch name, a tag, or a + commit hash. + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + local_dir (`str` or `Path`, *optional*): + If provided, the downloaded files will be placed under this directory. + proxies (`dict`, *optional*): + Dictionary mapping protocol to the URL of the proxy passed to + `requests.request`. + etag_timeout (`float`, *optional*, defaults to `10`): + When fetching ETag, how many seconds to wait for the server to send + data before giving up, which is passed to `requests.request`. + force_download (`bool`, *optional*, defaults to `False`): + Whether the file should be downloaded even if it already exists in the local cache.
+ token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + local_files_only (`bool`, *optional*, defaults to `False`): + If `True`, avoid downloading the file and return the path to the + local cached file if it exists. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are downloaded. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not downloaded. + max_workers (`int`, *optional*): + Number of concurrent threads to download files (1 thread = 1 file download). + Defaults to 8. + tqdm_class (`tqdm`, *optional*): + If provided, overwrites the default behavior for the progress bar. Passed + argument must inherit from `tqdm.auto.tqdm` or at least mimic its behavior. + Note that the `tqdm_class` is not passed to each individual download. + Defaults to the custom HF progress bar that can be disabled by setting + `HF_HUB_DISABLE_PROGRESS_BARS` environment variable. + + Returns: + `str`: folder path of the repo snapshot. + + Raises: + [`~utils.RepositoryNotFoundError`] + If the repository to download from cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + [`~utils.RevisionNotFoundError`] + If the revision to download from cannot be found. + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + If `token=True` and the token cannot be found. + [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if + ETag cannot be determined. + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + if some parameter value is invalid. + """ + from ._snapshot_download import snapshot_download + + if token is None: + # Cannot do `token = token or self.token` as token can be `False`. + token = self.token + + return snapshot_download( + repo_id=repo_id, + repo_type=repo_type, + revision=revision, + endpoint=self.endpoint, + cache_dir=cache_dir, + local_dir=local_dir, + local_dir_use_symlinks=local_dir_use_symlinks, + library_name=self.library_name, + library_version=self.library_version, + user_agent=self.user_agent, + proxies=proxies, + etag_timeout=etag_timeout, + resume_download=resume_download, + force_download=force_download, + token=token, + local_files_only=local_files_only, + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + max_workers=max_workers, + tqdm_class=tqdm_class, + ) + + def get_safetensors_metadata( + self, + repo_id: str, + *, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> SafetensorsRepoMetadata: + """ + Parse metadata for a safetensors repo on the Hub. + + We first check if the repo has a single safetensors file or a sharded safetensors repo. If it's a single + safetensors file, we parse the metadata from this file. If it's a sharded safetensors repo, we parse the + metadata from the index file and then parse the metadata from each shard. + + To parse metadata from a single safetensors file, use [`parse_safetensors_file_metadata`]. + + For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format. 
+ + Args: + repo_id (`str`): + A user or an organization name and a repo name separated by a `/`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the file is in a dataset or space, `None` or `"model"` if in a + model. Default is `None`. + revision (`str`, *optional*): + The git revision to fetch the file from. Can be a branch name, a tag, or a commit hash. Defaults to the + head of the `"main"` branch. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`SafetensorsRepoMetadata`]: information related to the safetensors repo. + + Raises: + [`NotASafetensorsRepoError`] + If the repo is not a safetensors repo, i.e. it doesn't have either a + `model.safetensors` or a `model.safetensors.index.json` file. + [`SafetensorsParsingError`] + If a safetensors file header couldn't be parsed correctly. + + Example: + ```py + # Parse repo with single weights file + >>> metadata = get_safetensors_metadata("bigscience/bloomz-560m") + >>> metadata + SafetensorsRepoMetadata( + metadata=None, + sharded=False, + weight_map={'h.0.input_layernorm.bias': 'model.safetensors', ...}, + files_metadata={'model.safetensors': SafetensorsFileMetadata(...)} + ) + >>> metadata.files_metadata["model.safetensors"].metadata + {'format': 'pt'} + + # Parse repo with sharded model + >>> metadata = get_safetensors_metadata("bigscience/bloom") + Parse safetensors files: 100%|██████████████████████████████████████████| 72/72 [00:12<00:00, 5.78it/s] + >>> metadata + SafetensorsRepoMetadata(metadata={'total_size': 352494542848}, sharded=True, weight_map={...}, files_metadata={...}) + >>> len(metadata.files_metadata) + 72 # All safetensors files have been fetched + + # Parse a repo with no safetensors weights + >>> get_safetensors_metadata("runwayml/stable-diffusion-v1-5") + NotASafetensorsRepoError: 'runwayml/stable-diffusion-v1-5' is not a safetensors repo. Couldn't find 'model.safetensors.index.json' or 'model.safetensors' files.
+ ``` + """ + if self.file_exists( # Single safetensors file => non-sharded model + repo_id=repo_id, + filename=constants.SAFETENSORS_SINGLE_FILE, + repo_type=repo_type, + revision=revision, + token=token, + ): + file_metadata = self.parse_safetensors_file_metadata( + repo_id=repo_id, + filename=constants.SAFETENSORS_SINGLE_FILE, + repo_type=repo_type, + revision=revision, + token=token, + ) + return SafetensorsRepoMetadata( + metadata=None, + sharded=False, + weight_map={ + tensor_name: constants.SAFETENSORS_SINGLE_FILE for tensor_name in file_metadata.tensors.keys() + }, + files_metadata={constants.SAFETENSORS_SINGLE_FILE: file_metadata}, + ) + elif self.file_exists( # Multiple safetensors files => sharded with index + repo_id=repo_id, + filename=constants.SAFETENSORS_INDEX_FILE, + repo_type=repo_type, + revision=revision, + token=token, + ): + # Fetch index + index_file = self.hf_hub_download( + repo_id=repo_id, + filename=constants.SAFETENSORS_INDEX_FILE, + repo_type=repo_type, + revision=revision, + token=token, + ) + with open(index_file) as f: + index = json.load(f) + + weight_map = index.get("weight_map", {}) + + # Fetch metadata per shard + files_metadata = {} + + def _parse(filename: str) -> None: + files_metadata[filename] = self.parse_safetensors_file_metadata( + repo_id=repo_id, filename=filename, repo_type=repo_type, revision=revision, token=token + ) + + thread_map( + _parse, + set(weight_map.values()), + desc="Parse safetensors files", + tqdm_class=hf_tqdm, + ) + + return SafetensorsRepoMetadata( + metadata=index.get("metadata", None), + sharded=True, + weight_map=weight_map, + files_metadata=files_metadata, + ) + else: + # Not a safetensors repo + raise NotASafetensorsRepoError( + f"'{repo_id}' is not a safetensors repo. Couldn't find '{constants.SAFETENSORS_INDEX_FILE}' or '{constants.SAFETENSORS_SINGLE_FILE}' files." + ) + + def parse_safetensors_file_metadata( + self, + repo_id: str, + filename: str, + *, + repo_type: Optional[str] = None, + revision: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> SafetensorsFileMetadata: + """ + Parse metadata from a safetensors file on the Hub. + + To parse metadata from all safetensors files in a repo at once, use [`get_safetensors_metadata`]. + + For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format. + + Args: + repo_id (`str`): + A user or an organization name and a repo name separated by a `/`. + filename (`str`): + The name of the file in the repo. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the file is in a dataset or space, `None` or `"model"` if in a + model. Default is `None`. + revision (`str`, *optional*): + The git revision to fetch the file from. Can be a branch name, a tag, or a commit hash. Defaults to the + head of the `"main"` branch. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`SafetensorsFileMetadata`]: information related to a safetensors file. + + Raises: + [`NotASafetensorsRepoError`]: + If the repo is not a safetensors repo i.e. doesn't have either a + `model.safetensors` or a `model.safetensors.index.json` file. + [`SafetensorsParsingError`]: + If a safetensors file header couldn't be parsed correctly. 
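+ + Example (an illustrative sketch; the repo is the one used in the [`get_safetensors_metadata`] example above, output abridged): + ```py + >>> metadata = parse_safetensors_file_metadata("bigscience/bloomz-560m", "model.safetensors") + >>> metadata.metadata + {'format': 'pt'} + >>> len(metadata.tensors) # one entry per tensor stored in the file + ```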
+ """ + url = hf_hub_url( + repo_id=repo_id, filename=filename, repo_type=repo_type, revision=revision, endpoint=self.endpoint + ) + _headers = self._build_hf_headers(token=token) + + # 1. Fetch first 100kb + # Empirically, 97% of safetensors files have a metadata size < 100kb (over the top 1000 models on the Hub). + # We assume fetching 100kb is faster than making 2 GET requests. Therefore we always fetch the first 100kb to + # avoid the 2nd GET in most cases. + # See https://github.com/huggingface/huggingface_hub/pull/1855#discussion_r1404286419. + response = get_session().get(url, headers={**_headers, "range": "bytes=0-100000"}) + hf_raise_for_status(response) + + # 2. Parse metadata size + metadata_size = struct.unpack(" constants.SAFETENSORS_MAX_HEADER_LENGTH: + raise SafetensorsParsingError( + f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision " + f"'{revision or constants.DEFAULT_REVISION}'): safetensors header is too big. Maximum supported size is " + f"{constants.SAFETENSORS_MAX_HEADER_LENGTH} bytes (got {metadata_size})." + ) + + # 3.a. Get metadata from payload + if metadata_size <= 100000: + metadata_as_bytes = response.content[8 : 8 + metadata_size] + else: # 3.b. Request full metadata + response = get_session().get(url, headers={**_headers, "range": f"bytes=8-{metadata_size + 7}"}) + hf_raise_for_status(response) + metadata_as_bytes = response.content + + # 4. Parse json header + try: + metadata_as_dict = json.loads(metadata_as_bytes.decode(errors="ignore")) + except json.JSONDecodeError as e: + raise SafetensorsParsingError( + f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision " + f"'{revision or constants.DEFAULT_REVISION}'): header is not json-encoded string. Please make sure this is a " + "correctly formatted safetensors file." + ) from e + + try: + return SafetensorsFileMetadata( + metadata=metadata_as_dict.get("__metadata__", {}), + tensors={ + key: TensorInfo( + dtype=tensor["dtype"], + shape=tensor["shape"], + data_offsets=tuple(tensor["data_offsets"]), # type: ignore + ) + for key, tensor in metadata_as_dict.items() + if key != "__metadata__" + }, + ) + except (KeyError, IndexError) as e: + raise SafetensorsParsingError( + f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision " + f"'{revision or constants.DEFAULT_REVISION}'): header format not recognized. Please make sure this is a correctly" + " formatted safetensors file." + ) from e + + @validate_hf_hub_args + def create_branch( + self, + repo_id: str, + *, + branch: str, + revision: Optional[str] = None, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + exist_ok: bool = False, + ) -> None: + """ + Create a new branch for a repo on the Hub, starting from the specified revision (defaults to `main`). + To find a revision suiting your needs, you can use [`list_repo_refs`] or [`list_repo_commits`]. + + Args: + repo_id (`str`): + The repository in which the branch will be created. + Example: `"user/my-cool-model"`. + + branch (`str`): + The name of the branch to create. + + revision (`str`, *optional*): + The git revision to create the branch from. It can be a branch name or + the OID/SHA of a commit, as a hexadecimal string. Defaults to the head + of the `"main"` branch. + + token (`bool` or `str`, *optional*): + A valid user access token (string). 
Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if creating a branch on a dataset or + space, `None` or `"model"` if creating a branch on a model. Default is `None`. + + exist_ok (`bool`, *optional*, defaults to `False`): + If `True`, do not raise an error if the branch already exists. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If repository is not found (error 404): wrong repo_id/repo_type, private + but not authenticated or repo does not exist. + [`~utils.BadRequestError`]: + If the reference is invalid for a branch. Ex: `refs/pr/5` or `refs/foo/bar`. + [`~utils.HfHubHTTPError`]: + If the branch already exists on the repo (error 409) and `exist_ok` is + set to `False`. + """ + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + branch = quote(branch, safe="") + + # Prepare request + branch_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/branch/{branch}" + headers = self._build_hf_headers(token=token) + payload = {} + if revision is not None: + payload["startingPoint"] = revision + + # Create branch + response = get_session().post(url=branch_url, headers=headers, json=payload) + try: + hf_raise_for_status(response) + except HfHubHTTPError as e: + if exist_ok and e.response.status_code == 409: + return + elif exist_ok and e.response.status_code == 403: + # No write permission on the namespace but branch might already exist + try: + refs = self.list_repo_refs(repo_id=repo_id, repo_type=repo_type, token=token) + for branch_ref in refs.branches: + if branch_ref.name == branch: + return # Branch already exists => do not raise + except HfHubHTTPError: + pass # We raise the original error if the branch does not exist + raise + + @validate_hf_hub_args + def delete_branch( + self, + repo_id: str, + *, + branch: str, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> None: + """ + Delete a branch from a repo on the Hub. + + Args: + repo_id (`str`): + The repository in which a branch will be deleted. + Example: `"user/my-cool-model"`. + + branch (`str`): + The name of the branch to delete. + + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if deleting a branch on a dataset or + space, `None` or `"model"` if deleting a branch on a model. Default is `None`. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If repository is not found (error 404): wrong repo_id/repo_type, private + but not authenticated or repo does not exist. + [`~utils.HfHubHTTPError`]: + If trying to delete a protected branch. Ex: `main` cannot be deleted. + [`~utils.HfHubHTTPError`]: + If trying to delete a branch that does not exist.
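+ + Example (an illustrative sketch; `"user/my-cool-model"` is a placeholder repo that you need write access to): + ```python + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> api.create_branch("user/my-cool-model", branch="experiment") + >>> api.delete_branch("user/my-cool-model", branch="experiment") + ```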
+ + """ + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + branch = quote(branch, safe="") + + # Prepare request + branch_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/branch/{branch}" + headers = self._build_hf_headers(token=token) + + # Delete branch + response = get_session().delete(url=branch_url, headers=headers) + hf_raise_for_status(response) + + @validate_hf_hub_args + def create_tag( + self, + repo_id: str, + *, + tag: str, + tag_message: Optional[str] = None, + revision: Optional[str] = None, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + exist_ok: bool = False, + ) -> None: + """ + Tag a given commit of a repo on the Hub. + + Args: + repo_id (`str`): + The repository in which a commit will be tagged. + Example: `"user/my-cool-model"`. + + tag (`str`): + The name of the tag to create. + + tag_message (`str`, *optional*): + The description of the tag to create. + + revision (`str`, *optional*): + The git revision to tag. It can be a branch name or the OID/SHA of a + commit, as a hexadecimal string. Shorthands (7 first characters) are + also supported. Defaults to the head of the `"main"` branch. + + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if tagging a dataset or + space, `None` or `"model"` if tagging a model. Default is + `None`. + + exist_ok (`bool`, *optional*, defaults to `False`): + If `True`, do not raise an error if tag already exists. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If repository is not found (error 404): wrong repo_id/repo_type, private + but not authenticated or repo does not exist. + [`~utils.RevisionNotFoundError`]: + If revision is not found (error 404) on the repo. + [`~utils.HfHubHTTPError`]: + If the branch already exists on the repo (error 409) and `exist_ok` is + set to `False`. + """ + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION + + # Prepare request + tag_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tag/{revision}" + headers = self._build_hf_headers(token=token) + payload = {"tag": tag} + if tag_message is not None: + payload["message"] = tag_message + + # Tag + response = get_session().post(url=tag_url, headers=headers, json=payload) + try: + hf_raise_for_status(response) + except HfHubHTTPError as e: + if not (e.response.status_code == 409 and exist_ok): + raise + + @validate_hf_hub_args + def delete_tag( + self, + repo_id: str, + *, + tag: str, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> None: + """ + Delete a tag from a repo on the Hub. + + Args: + repo_id (`str`): + The repository in which a tag will be deleted. + Example: `"user/my-cool-model"`. + + tag (`str`): + The name of the tag to delete. + + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if tagging a dataset or space, `None` or + `"model"` if tagging a model. 
Default is `None`. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If repository is not found (error 404): wrong repo_id/repo_type, private + but not authenticated or repo does not exist. + [`~utils.RevisionNotFoundError`]: + If tag is not found. + """ + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + tag = quote(tag, safe="") + + # Prepare request + tag_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tag/{tag}" + headers = self._build_hf_headers(token=token) + + # Un-tag + response = get_session().delete(url=tag_url, headers=headers) + hf_raise_for_status(response) + + @validate_hf_hub_args + def get_full_repo_name( + self, + model_id: str, + *, + organization: Optional[str] = None, + token: Union[bool, str, None] = None, + ): + """ + Returns the repository name for a given model ID and optional + organization. + + Args: + model_id (`str`): + The name of the model. + organization (`str`, *optional*): + If passed, the repository name will be in the organization + namespace instead of the user namespace. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + `str`: The repository name in the user's namespace + ({username}/{model_id}) if no organization is passed, and under the + organization namespace ({organization}/{model_id}) otherwise. + """ + if organization is None: + if "/" in model_id: + username = model_id.split("/")[0] + else: + username = self.whoami(token=token)["name"] # type: ignore + return f"{username}/{model_id}" + else: + return f"{organization}/{model_id}" + + @validate_hf_hub_args + def get_repo_discussions( + self, + repo_id: str, + *, + author: Optional[str] = None, + discussion_type: Optional[constants.DiscussionTypeFilter] = None, + discussion_status: Optional[constants.DiscussionStatusFilter] = None, + repo_type: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> Iterator[Discussion]: + """ + Fetches Discussions and Pull Requests for the given repo. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + author (`str`, *optional*): + Pass a value to filter by discussion author. `None` means no filter. + Default is `None`. + discussion_type (`str`, *optional*): + Set to `"pull_request"` to fetch only pull requests, `"discussion"` + to fetch only discussions. Set to `"all"` or `None` to fetch both. + Default is `None`. + discussion_status (`str`, *optional*): + Set to `"open"` (respectively `"closed"`) to fetch only open + (respectively closed) discussions. Set to `"all"` or `None` + to fetch both. + Default is `None`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if fetching from a dataset or + space, `None` or `"model"` if fetching from a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + `Iterator[Discussion]`: An iterator of [`Discussion`] objects. 
+ + Example: + Collecting all discussions of a repo in a list: + + ```python + >>> from huggingface_hub import get_repo_discussions + >>> discussions_list = list(get_repo_discussions(repo_id="bert-base-uncased")) + ``` + + Iterating over discussions of a repo: + + ```python + >>> from huggingface_hub import get_repo_discussions + >>> for discussion in get_repo_discussions(repo_id="bert-base-uncased"): + ... print(discussion.num, discussion.title) + ``` + """ + if repo_type not in constants.REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + if discussion_type is not None and discussion_type not in constants.DISCUSSION_TYPES: + raise ValueError(f"Invalid discussion_type, must be one of {constants.DISCUSSION_TYPES}") + if discussion_status is not None and discussion_status not in constants.DISCUSSION_STATUS: + raise ValueError(f"Invalid discussion_status, must be one of {constants.DISCUSSION_STATUS}") + + headers = self._build_hf_headers(token=token) + path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions" + + params: Dict[str, Union[str, int]] = {} + if discussion_type is not None: + params["type"] = discussion_type + if discussion_status is not None: + params["status"] = discussion_status + if author is not None: + params["author"] = author + + def _fetch_discussion_page(page_index: int): + params["p"] = page_index + resp = get_session().get(path, headers=headers, params=params) + hf_raise_for_status(resp) + paginated_discussions = resp.json() + total = paginated_discussions["count"] + start = paginated_discussions["start"] + discussions = paginated_discussions["discussions"] + has_next = (start + len(discussions)) < total + return discussions, has_next + + has_next, page_index = True, 0 + + while has_next: + discussions, has_next = _fetch_discussion_page(page_index=page_index) + for discussion in discussions: + yield Discussion( + title=discussion["title"], + num=discussion["num"], + author=discussion.get("author", {}).get("name", "deleted"), + created_at=parse_datetime(discussion["createdAt"]), + status=discussion["status"], + repo_id=discussion["repo"]["name"], + repo_type=discussion["repo"]["type"], + is_pull_request=discussion["isPullRequest"], + endpoint=self.endpoint, + ) + page_index = page_index + 1 + + @validate_hf_hub_args + def get_discussion_details( + self, + repo_id: str, + discussion_num: int, + *, + repo_type: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> DiscussionWithDetails: + """Fetches a Discussion's or Pull Request's details from the Hub. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request. Must be a strictly positive integer. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if fetching from a dataset or + space, `None` or `"model"` if fetching from a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`.
+ + Returns: [`DiscussionWithDetails`] + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + if not isinstance(discussion_num, int) or discussion_num <= 0: + raise ValueError("Invalid discussion_num, must be a positive integer") + if repo_type not in constants.REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + + path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions/{discussion_num}" + headers = self._build_hf_headers(token=token) + resp = get_session().get(path, params={"diff": "1"}, headers=headers) + hf_raise_for_status(resp) + + discussion_details = resp.json() + is_pull_request = discussion_details["isPullRequest"] + + target_branch = discussion_details["changes"]["base"] if is_pull_request else None + conflicting_files = discussion_details["filesWithConflicts"] if is_pull_request else None + merge_commit_oid = discussion_details["changes"].get("mergeCommitId", None) if is_pull_request else None + + return DiscussionWithDetails( + title=discussion_details["title"], + num=discussion_details["num"], + author=discussion_details.get("author", {}).get("name", "deleted"), + created_at=parse_datetime(discussion_details["createdAt"]), + status=discussion_details["status"], + repo_id=discussion_details["repo"]["name"], + repo_type=discussion_details["repo"]["type"], + is_pull_request=discussion_details["isPullRequest"], + events=[deserialize_event(evt) for evt in discussion_details["events"]], + conflicting_files=conflicting_files, + target_branch=target_branch, + merge_commit_oid=merge_commit_oid, + diff=discussion_details.get("diff"), + endpoint=self.endpoint, + ) + + @validate_hf_hub_args + def create_discussion( + self, + repo_id: str, + title: str, + *, + token: Union[bool, str, None] = None, + description: Optional[str] = None, + repo_type: Optional[str] = None, + pull_request: bool = False, + ) -> DiscussionWithDetails: + """Creates a Discussion or Pull Request. + + Pull Requests created programmatically will be in `"draft"` status. + + Creating a Pull Request with changes can also be done at once with [`HfApi.create_commit`]. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + title (`str`): + The title of the discussion. It can be up to 200 characters long, + and must be at least 3 characters long. Leading and trailing whitespaces + will be stripped. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + description (`str`, *optional*): + An optional description for the Pull Request. + Defaults to `"Discussion opened with the huggingface_hub Python library"` + pull_request (`bool`, *optional*): + Whether to create a Pull Request or discussion. If `True`, creates a Pull Request. + If `False`, creates a discussion. 
Defaults to `False`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if creating the discussion on a dataset or + space, `None` or `"model"` if on a model. Default is + `None`. + + Returns: [`DiscussionWithDetails`] + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access.""" + if repo_type not in constants.REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + + if description is not None: + description = description.strip() + description = ( + description + if description + else ( + f"{'Pull Request' if pull_request else 'Discussion'} opened with the" + " [huggingface_hub Python" + " library](https://huggingface.co/docs/huggingface_hub)" + ) + ) + + headers = self._build_hf_headers(token=token) + resp = get_session().post( + f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions", + json={ + "title": title.strip(), + "description": description, + "pullRequest": pull_request, + }, + headers=headers, + ) + hf_raise_for_status(resp) + num = resp.json()["num"] + return self.get_discussion_details( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=num, + token=token, + ) + + @validate_hf_hub_args + def create_pull_request( + self, + repo_id: str, + title: str, + *, + token: Union[bool, str, None] = None, + description: Optional[str] = None, + repo_type: Optional[str] = None, + ) -> DiscussionWithDetails: + """Creates a Pull Request. Pull Requests created programmatically will be in `"draft"` status. + + Creating a Pull Request with changes can also be done at once with [`HfApi.create_commit`]. + + This is a wrapper around [`HfApi.create_discussion`]. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + title (`str`): + The title of the discussion. It can be up to 200 characters long, + and must be at least 3 characters long. Leading and trailing whitespaces + will be stripped. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + description (`str`, *optional*): + An optional description for the Pull Request. + Defaults to `"Discussion opened with the huggingface_hub Python library"`. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if creating the Pull Request on a dataset or + space, `None` or `"model"` if on a model. Default is + `None`. + + Returns: [`DiscussionWithDetails`] + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found.
This may be because it doesn't exist, + > or because it is set to `private` and you do not have access.""" + return self.create_discussion( + repo_id=repo_id, + title=title, + token=token, + description=description, + repo_type=repo_type, + pull_request=True, + ) + + def _post_discussion_changes( + self, + *, + repo_id: str, + discussion_num: int, + resource: str, + body: Optional[dict] = None, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> requests.Response: + """Internal utility to POST changes to a Discussion or Pull Request""" + if not isinstance(discussion_num, int) or discussion_num <= 0: + raise ValueError("Invalid discussion_num, must be a positive integer") + if repo_type not in constants.REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + repo_id = f"{repo_type}s/{repo_id}" + + path = f"{self.endpoint}/api/{repo_id}/discussions/{discussion_num}/{resource}" + + headers = self._build_hf_headers(token=token) + resp = requests.post(path, headers=headers, json=body) + hf_raise_for_status(resp) + return resp + + @validate_hf_hub_args + def comment_discussion( + self, + repo_id: str, + discussion_num: int, + comment: str, + *, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> DiscussionComment: + """Creates a new comment on the given Discussion. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request. Must be a strictly positive integer. + comment (`str`): + The content of the comment to create. Comments support markdown formatting. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the discussion is on a dataset or + space, `None` or `"model"` if on a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`DiscussionComment`]: the newly created comment + + + Examples: + ```python + + >>> comment = \"\"\" + ... Hello @otheruser! + ... + ... # This is a title + ... + ... **This is bold**, *this is italic* and ~this is strikethrough~ + ... And [this](http://url) is a link + ... \"\"\" + + >>> HfApi().comment_discussion( + ... repo_id="username/repo_name", + ... discussion_num=34, + ... comment=comment + ... ) + # DiscussionComment(id='deadbeef0000000', type='comment', ...) + + ``` + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access.
+ """ + resp = self._post_discussion_changes( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=discussion_num, + token=token, + resource="comment", + body={"comment": comment}, + ) + return deserialize_event(resp.json()["newMessage"]) # type: ignore + + @validate_hf_hub_args + def rename_discussion( + self, + repo_id: str, + discussion_num: int, + new_title: str, + *, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> DiscussionTitleChange: + """Renames a Discussion. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request . Must be a strictly positive integer. + new_title (`str`): + The new title for the discussion + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if uploading to a dataset or + space, `None` or `"model"` if uploading to a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`DiscussionTitleChange`]: the title change event + + + Examples: + ```python + >>> new_title = "New title, fixing a typo" + >>> HfApi().rename_discussion( + ... repo_id="username/repo_name", + ... discussion_num=34 + ... new_title=new_title + ... ) + # DiscussionTitleChange(id='deadbeef0000000', type='title-change', ...) + + ``` + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + resp = self._post_discussion_changes( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=discussion_num, + token=token, + resource="title", + body={"title": new_title}, + ) + return deserialize_event(resp.json()["newTitle"]) # type: ignore + + @validate_hf_hub_args + def change_discussion_status( + self, + repo_id: str, + discussion_num: int, + new_status: Literal["open", "closed"], + *, + token: Union[bool, str, None] = None, + comment: Optional[str] = None, + repo_type: Optional[str] = None, + ) -> DiscussionStatusChange: + """Closes or re-opens a Discussion or Pull Request. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request . Must be a strictly positive integer. + new_status (`str`): + The new status for the discussion, either `"open"` or `"closed"`. + comment (`str`, *optional*): + An optional comment to post with the status change. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if uploading to a dataset or + space, `None` or `"model"` if uploading to a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). 
+ To disable authentication, pass `False`. + + Returns: + [`DiscussionStatusChange`]: the status change event + + + Examples: + ```python + >>> HfApi().change_discussion_status( + ... repo_id="username/repo_name", + ... discussion_num=34, + ... new_status="closed" + ... ) + # DiscussionStatusChange(id='deadbeef0000000', type='status-change', ...) + + ``` + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + if new_status not in ["open", "closed"]: + raise ValueError("Invalid status, valid statuses are: 'open' and 'closed'") + body: Dict[str, str] = {"status": new_status} + if comment and comment.strip(): + body["comment"] = comment.strip() + resp = self._post_discussion_changes( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=discussion_num, + token=token, + resource="status", + body=body, + ) + return deserialize_event(resp.json()["newStatus"]) # type: ignore + + @validate_hf_hub_args + def merge_pull_request( + self, + repo_id: str, + discussion_num: int, + *, + token: Union[bool, str, None] = None, + comment: Optional[str] = None, + repo_type: Optional[str] = None, + ): + """Merges a Pull Request. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request. Must be a strictly positive integer. + comment (`str`, *optional*): + An optional comment to post with the status change. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the Pull Request is on a dataset or + space, `None` or `"model"` if on a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`DiscussionStatusChange`]: the status change event + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + self._post_discussion_changes( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=discussion_num, + token=token, + resource="merge", + body={"comment": comment.strip()} if comment and comment.strip() else None, + ) + + @validate_hf_hub_args + def edit_discussion_comment( + self, + repo_id: str, + discussion_num: int, + comment_id: str, + new_content: str, + *, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> DiscussionComment: + """Edits a comment on a Discussion or Pull Request.
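+ + Example (an illustrative sketch; the repository, discussion number and comment id below are placeholders): + ```python + >>> comment = HfApi().edit_discussion_comment( + ... repo_id="username/repo_name", + ... discussion_num=34, + ... comment_id="deadbeef0000000", + ... new_content="**Edited** comment body", + ... ) + # DiscussionComment(id='deadbeef0000000', type='comment', ...) + ```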
+ + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request. Must be a strictly positive integer. + comment_id (`str`): + The ID of the comment to edit. + new_content (`str`): + The new content of the comment. Comments support markdown formatting. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the discussion is on a dataset or + space, `None` or `"model"` if on a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`DiscussionComment`]: the edited comment + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + resp = self._post_discussion_changes( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=discussion_num, + token=token, + resource=f"comment/{comment_id.lower()}/edit", + body={"content": new_content}, + ) + return deserialize_event(resp.json()["updatedComment"]) # type: ignore + + @validate_hf_hub_args + def hide_discussion_comment( + self, + repo_id: str, + discussion_num: int, + comment_id: str, + *, + token: Union[bool, str, None] = None, + repo_type: Optional[str] = None, + ) -> DiscussionComment: + """Hides a comment on a Discussion or Pull Request. + + > [!WARNING] + > Hidden comments' content cannot be retrieved anymore. Hiding a comment is irreversible. + + Args: + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + discussion_num (`int`): + The number of the Discussion or Pull Request. Must be a strictly positive integer. + comment_id (`str`): + The ID of the comment to hide. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if the discussion is on a dataset or + space, `None` or `"model"` if on a model. Default is + `None`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`DiscussionComment`]: the hidden comment + + > [!TIP] + > Raises the following errors: + > + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the HuggingFace API returned an error + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if some parameter value is invalid + > - [`~utils.RepositoryNotFoundError`] + > If the repository to download from cannot be found. This may be because it doesn't exist, + > or because it is set to `private` and you do not have access. + """ + warnings.warn( + "Hidden comments' content cannot be retrieved anymore.
Hiding a comment is irreversible.", + UserWarning, + ) + resp = self._post_discussion_changes( + repo_id=repo_id, + repo_type=repo_type, + discussion_num=discussion_num, + token=token, + resource=f"comment/{comment_id.lower()}/hide", + ) + return deserialize_event(resp.json()["updatedComment"]) # type: ignore + + @validate_hf_hub_args + def add_space_secret( + self, + repo_id: str, + key: str, + value: str, + *, + description: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> None: + """Adds or updates a secret in a Space. + + Secrets allow you to set secret keys or tokens on a Space without hardcoding them. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + key (`str`): + Secret key. Example: `"GITHUB_API_KEY"`. + value (`str`): + Secret value. Example: `"your_github_api_key"`. + description (`str`, *optional*): + Secret description. Example: `"Github API key to access the Github API"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + """ + payload = {"key": key, "value": value} + if description is not None: + payload["description"] = description + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/secrets", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(r) + + @validate_hf_hub_args + def delete_space_secret(self, repo_id: str, key: str, *, token: Union[bool, str, None] = None) -> None: + """Deletes a secret from a Space. + + Secrets allow you to set secret keys or tokens on a Space without hardcoding them. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + key (`str`): + Secret key. Example: `"GITHUB_API_KEY"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + """ + r = get_session().delete( + f"{self.endpoint}/api/spaces/{repo_id}/secrets", + headers=self._build_hf_headers(token=token), + json={"key": key}, + ) + hf_raise_for_status(r) + + @validate_hf_hub_args + def get_space_variables(self, repo_id: str, *, token: Union[bool, str, None] = None) -> Dict[str, SpaceVariable]: + """Gets all variables from a Space. + + Variables allow you to set environment variables on a Space without hardcoding them. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables + + Args: + repo_id (`str`): + ID of the repo to query. Example: `"bigcode/in-the-stack"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`.
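+ + Example (an illustrative sketch; `"user/my-space"` is a placeholder Space id): + ```python + >>> from huggingface_hub import HfApi + >>> variables = HfApi().get_space_variables("user/my-space") + >>> for key, variable in variables.items(): + ... print(key, variable.value) + ```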
+ """ + r = get_session().get( + f"{self.endpoint}/api/spaces/{repo_id}/variables", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(r) + return {k: SpaceVariable(k, v) for k, v in r.json().items()} + + @validate_hf_hub_args + def add_space_variable( + self, + repo_id: str, + key: str, + value: str, + *, + description: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> Dict[str, SpaceVariable]: + """Adds or updates a variable in a Space. + + Variables allow to set environment variables to a Space without hardcoding them. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + key (`str`): + Variable key. Example: `"MODEL_REPO_ID"` + value (`str`): + Variable value. Example: `"the_model_repo_id"`. + description (`str`): + Description of the variable. Example: `"Model Repo ID of the implemented model"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + """ + payload = {"key": key, "value": value} + if description is not None: + payload["description"] = description + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/variables", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(r) + return {k: SpaceVariable(k, v) for k, v in r.json().items()} + + @validate_hf_hub_args + def delete_space_variable( + self, repo_id: str, key: str, *, token: Union[bool, str, None] = None + ) -> Dict[str, SpaceVariable]: + """Deletes a variable from a Space. + + Variables allow to set environment variables to a Space without hardcoding them. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + key (`str`): + Variable key. Example: `"MODEL_REPO_ID"` + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + """ + r = get_session().delete( + f"{self.endpoint}/api/spaces/{repo_id}/variables", + headers=self._build_hf_headers(token=token), + json={"key": key}, + ) + hf_raise_for_status(r) + return {k: SpaceVariable(k, v) for k, v in r.json().items()} + + @validate_hf_hub_args + def get_space_runtime(self, repo_id: str, *, token: Union[bool, str, None] = None) -> SpaceRuntime: + """Gets runtime information about a Space. + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + Returns: + [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. 
+ """ + r = get_session().get( + f"{self.endpoint}/api/spaces/{repo_id}/runtime", headers=self._build_hf_headers(token=token) + ) + hf_raise_for_status(r) + return SpaceRuntime(r.json()) + + @validate_hf_hub_args + def request_space_hardware( + self, + repo_id: str, + hardware: SpaceHardware, + *, + token: Union[bool, str, None] = None, + sleep_time: Optional[int] = None, + ) -> SpaceRuntime: + """Request new hardware for a Space. + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + hardware (`str` or [`SpaceHardware`]): + Hardware on which to run the Space. Example: `"t4-medium"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + sleep_time (`int`, *optional*): + Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want + your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure + the sleep time (value is fixed to 48 hours of inactivity). + See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. + Returns: + [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. + + > [!TIP] + > It is also possible to request hardware directly when creating the Space repo! See [`create_repo`] for details. + """ + if sleep_time is not None and hardware == SpaceHardware.CPU_BASIC: + warnings.warn( + "If your Space runs on the default 'cpu-basic' hardware, it will go to sleep if inactive for more" + " than 48 hours. This value is not configurable. If you don't want your Space to deactivate or if" + " you want to set a custom sleep time, you need to upgrade to a paid Hardware.", + UserWarning, + ) + payload: Dict[str, Any] = {"flavor": hardware} + if sleep_time is not None: + payload["sleepTimeSeconds"] = sleep_time + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/hardware", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(r) + return SpaceRuntime(r.json()) + + @validate_hf_hub_args + def set_space_sleep_time( + self, repo_id: str, sleep_time: int, *, token: Union[bool, str, None] = None + ) -> SpaceRuntime: + """Set a custom sleep time for a Space running on upgraded hardware.. + + Your Space will go to sleep after X seconds of inactivity. You are not billed when your Space is in "sleep" + mode. If a new visitor lands on your Space, it will "wake it up". Only upgraded hardware can have a + configurable sleep time. To know more about the sleep stage, please refer to + https://huggingface.co/docs/hub/spaces-gpus#sleep-time. + + Args: + repo_id (`str`): + ID of the repo to update. Example: `"bigcode/in-the-stack"`. + sleep_time (`int`, *optional*): + Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want + your Space to pause (default behavior for upgraded hardware). For free hardware, you can't configure + the sleep time (value is fixed to 48 hours of inactivity). + See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). 
+ To disable authentication, pass `False`. + Returns: + [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. + + > [!TIP] + > It is also possible to set a custom sleep time when requesting hardware with [`request_space_hardware`]. + """ + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/sleeptime", + headers=self._build_hf_headers(token=token), + json={"seconds": sleep_time}, + ) + hf_raise_for_status(r) + runtime = SpaceRuntime(r.json()) + + hardware = runtime.requested_hardware or runtime.hardware + if hardware == SpaceHardware.CPU_BASIC: + warnings.warn( + "If your Space runs on the default 'cpu-basic' hardware, it will go to sleep if inactive for more" + " than 48 hours. This value is not configurable. If you don't want your Space to deactivate or if" + " you want to set a custom sleep time, you need to upgrade to a paid Hardware.", + UserWarning, + ) + return runtime + + @validate_hf_hub_args + def pause_space(self, repo_id: str, *, token: Union[bool, str, None] = None) -> SpaceRuntime: + """Pause your Space. + + A paused Space stops executing until manually restarted by its owner. This is different from the sleeping + state in which free Spaces go after 48h of inactivity. Paused time is not billed to your account, no matter the + hardware you've selected. To restart your Space, use [`restart_space`] and go to your Space settings page. + + For more details, please visit [the docs](https://huggingface.co/docs/hub/spaces-gpus#pause). + + Args: + repo_id (`str`): + ID of the Space to pause. Example: `"Salesforce/BLIP2"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`SpaceRuntime`]: Runtime information about your Space including `stage=PAUSED` and requested hardware. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If your Space is not found (error 404). Most probably wrong repo_id or your space is private but you + are not authenticated. + [`~utils.HfHubHTTPError`]: + 403 Forbidden: only the owner of a Space can pause it. If you want to manage a Space that you don't + own, either ask the owner by opening a Discussion or duplicate the Space. + [`~utils.BadRequestError`]: + If your Space is a static Space. Static Spaces are always running and never billed. If you want to hide + a static Space, you can set it to private. + """ + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/pause", headers=self._build_hf_headers(token=token) + ) + hf_raise_for_status(r) + return SpaceRuntime(r.json()) + + @validate_hf_hub_args + def restart_space( + self, repo_id: str, *, token: Union[bool, str, None] = None, factory_reboot: bool = False + ) -> SpaceRuntime: + """Restart your Space. + + This is the only way to programmatically restart a Space if you've put it on Pause (see [`pause_space`]). You + must be the owner of the Space to restart it. If you are using an upgraded hardware, your account will be + billed as soon as the Space is restarted. You can trigger a restart no matter the current state of a Space. + + For more details, please visit [the docs](https://huggingface.co/docs/hub/spaces-gpus#pause). + + Args: + repo_id (`str`): + ID of the Space to restart. Example: `"Salesforce/BLIP2"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). 
Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + factory_reboot (`bool`, *optional*): + If `True`, the Space will be rebuilt from scratch without caching any requirements. + + Returns: + [`SpaceRuntime`]: Runtime information about your Space. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If your Space is not found (error 404). Most probably wrong repo_id or your space is private but you + are not authenticated. + [`~utils.HfHubHTTPError`]: + 403 Forbidden: only the owner of a Space can restart it. If you want to restart a Space that you don't + own, either ask the owner by opening a Discussion or duplicate the Space. + [`~utils.BadRequestError`]: + If your Space is a static Space. Static Spaces are always running and never billed. If you want to hide + a static Space, you can set it to private. + """ + params = {} + if factory_reboot: + params["factory"] = "true" + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/restart", headers=self._build_hf_headers(token=token), params=params + ) + hf_raise_for_status(r) + return SpaceRuntime(r.json()) + + @validate_hf_hub_args + def duplicate_space( + self, + from_id: str, + to_id: Optional[str] = None, + *, + private: Optional[bool] = None, + token: Union[bool, str, None] = None, + exist_ok: bool = False, + hardware: Optional[SpaceHardware] = None, + storage: Optional[SpaceStorage] = None, + sleep_time: Optional[int] = None, + secrets: Optional[List[Dict[str, str]]] = None, + variables: Optional[List[Dict[str, str]]] = None, + ) -> RepoUrl: + """Duplicate a Space. + + Programmatically duplicate a Space. The new Space will be created in your account and will be in the same state + as the original Space (running or paused). You can duplicate a Space no matter the current state of a Space. + + Args: + from_id (`str`): + ID of the Space to duplicate. Example: `"pharma/CLIP-Interrogator"`. + to_id (`str`, *optional*): + ID of the new Space. Example: `"dog/CLIP-Interrogator"`. If not provided, the new Space will have the same + name as the original Space, but in your account. + private (`bool`, *optional*): + Whether the new Space should be private or not. Defaults to the same privacy as the original Space. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + exist_ok (`bool`, *optional*, defaults to `False`): + If `True`, do not raise an error if repo already exists. + hardware (`SpaceHardware` or `str`, *optional*): + Choice of Hardware. Example: `"t4-medium"`. See [`SpaceHardware`] for a complete list. + storage (`SpaceStorage` or `str`, *optional*): + Choice of persistent storage tier. Example: `"small"`. See [`SpaceStorage`] for a complete list. + sleep_time (`int`, *optional*): + Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want + your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure + the sleep time (value is fixed to 48 hours of inactivity). + See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. + secrets (`List[Dict[str, str]]`, *optional*): + A list of secret keys to set in your Space. 
Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. + variables (`List[Dict[str, str]]`, *optional*): + A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional. + For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables. + + Returns: + [`RepoUrl`]: URL to the newly created repo. Value is a subclass of `str` containing + attributes like `endpoint`, `repo_type` and `repo_id`. + + Raises: + [`~utils.RepositoryNotFoundError`]: + If one of `from_id` or `to_id` cannot be found. This may be because it doesn't exist, + or because it is set to `private` and you do not have access. + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): + If the HuggingFace API returned an error + + Example: + ```python + >>> from huggingface_hub import duplicate_space + + # Duplicate a Space to your account + >>> duplicate_space("multimodalart/dreambooth-training") + RepoUrl('https://huggingface.co/spaces/nateraw/dreambooth-training',...) + + # Can set custom destination id and visibility flag. + >>> duplicate_space("multimodalart/dreambooth-training", to_id="my-dreambooth", private=True) + RepoUrl('https://huggingface.co/spaces/nateraw/my-dreambooth',...) + ``` + """ + # Parse to_id if provided + parsed_to_id = RepoUrl(to_id) if to_id is not None else None + + # Infer target repo_id + to_namespace = ( # set namespace manually or default to username + parsed_to_id.namespace + if parsed_to_id is not None and parsed_to_id.namespace is not None + else self.whoami(token)["name"] + ) + to_repo_name = parsed_to_id.repo_name if to_id is not None else RepoUrl(from_id).repo_name # type: ignore + + # repository must be a valid repo_id (namespace/repo_name). + payload: Dict[str, Any] = {"repository": f"{to_namespace}/{to_repo_name}"} + + keys = ["private", "hardware", "storageTier", "sleepTimeSeconds", "secrets", "variables"] + values = [private, hardware, storage, sleep_time, secrets, variables] + payload.update({k: v for k, v in zip(keys, values) if v is not None}) + + if sleep_time is not None and hardware == SpaceHardware.CPU_BASIC: + warnings.warn( + "If your Space runs on the default 'cpu-basic' hardware, it will go to sleep if inactive for more" + " than 48 hours. This value is not configurable. If you don't want your Space to deactivate or if" + " you want to set a custom sleep time, you need to upgrade to a paid Hardware.", + UserWarning, + ) + + r = get_session().post( + f"{self.endpoint}/api/spaces/{from_id}/duplicate", + headers=self._build_hf_headers(token=token), + json=payload, + ) + + try: + hf_raise_for_status(r) + except HTTPError as err: + if exist_ok and err.response.status_code == 409: + # Repo already exists and `exist_ok=True` + pass + else: + raise + + return RepoUrl(r.json()["url"], endpoint=self.endpoint) + + @validate_hf_hub_args + def request_space_storage( + self, + repo_id: str, + storage: SpaceStorage, + *, + token: Union[bool, str, None] = None, + ) -> SpaceRuntime: + """Request persistent storage for a Space. + + Args: + repo_id (`str`): + ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`. + storage (`str` or [`SpaceStorage`]): + Storage tier. Either 'small', 'medium', or 'large'. 
+ token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + Returns: + [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. + + > [!TIP] + > It is not possible to decrease persistent storage after it's granted. To do so, you must delete it + > via [`delete_space_storage`]. + """ + payload: Dict[str, SpaceStorage] = {"tier": storage} + r = get_session().post( + f"{self.endpoint}/api/spaces/{repo_id}/storage", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(r) + return SpaceRuntime(r.json()) + + @validate_hf_hub_args + def delete_space_storage( + self, + repo_id: str, + *, + token: Union[bool, str, None] = None, + ) -> SpaceRuntime: + """Delete persistent storage for a Space. + + Args: + repo_id (`str`): + ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + Returns: + [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. + Raises: + [`BadRequestError`] + If the Space has no persistent storage. + + """ + r = get_session().delete( + f"{self.endpoint}/api/spaces/{repo_id}/storage", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(r) + return SpaceRuntime(r.json()) + + ####################### + # Inference Endpoints # + ####################### + + def list_inference_endpoints( + self, namespace: Optional[str] = None, *, token: Union[bool, str, None] = None + ) -> List[InferenceEndpoint]: + """Lists all inference endpoints for the given namespace. + + Args: + namespace (`str`, *optional*): + The namespace to list endpoints for. Defaults to the current user. Set to `"*"` to list all endpoints + from all namespaces (i.e. personal namespace and all orgs the user belongs to). + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + List[`InferenceEndpoint`]: A list of all inference endpoints for the given namespace. + + Example: + ```python + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> api.list_inference_endpoints() + [InferenceEndpoint(name='my-endpoint', ...), ...]
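+ + # Illustrative: also include endpoints from all orgs you belong to, using namespace="*" as documented above + >>> api.list_inference_endpoints(namespace="*") + [InferenceEndpoint(name='my-endpoint', ...), ...]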
+ ``` + """ + # Special case: list all endpoints for all namespaces the user has access to + if namespace == "*": + user = self.whoami(token=token) + + # List personal endpoints first + endpoints: List[InferenceEndpoint] = list_inference_endpoints(namespace=self._get_namespace(token=token)) + + # Then list endpoints for all orgs the user belongs to and ignore 401 errors (no billing or no access) + for org in user.get("orgs", []): + try: + endpoints += list_inference_endpoints(namespace=org["name"], token=token) + except HfHubHTTPError as error: + if error.response.status_code == 401: # Either no billing or user don't have access) + logger.debug("Cannot list Inference Endpoints for org '%s': %s", org["name"], error) + pass + + return endpoints + + # Normal case: list endpoints for a specific namespace + namespace = namespace or self._get_namespace(token=token) + + response = get_session().get( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + + return [ + InferenceEndpoint.from_raw(endpoint, namespace=namespace, token=token) + for endpoint in response.json()["items"] + ] + + def create_inference_endpoint( + self, + name: str, + *, + repository: str, + framework: str, + accelerator: str, + instance_size: str, + instance_type: str, + region: str, + vendor: str, + account_id: Optional[str] = None, + min_replica: int = 1, + max_replica: int = 1, + scale_to_zero_timeout: Optional[int] = None, + revision: Optional[str] = None, + task: Optional[str] = None, + custom_image: Optional[Dict] = None, + env: Optional[Dict[str, str]] = None, + secrets: Optional[Dict[str, str]] = None, + type: InferenceEndpointType = InferenceEndpointType.PROTECTED, + domain: Optional[str] = None, + path: Optional[str] = None, + cache_http_responses: Optional[bool] = None, + tags: Optional[List[str]] = None, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> InferenceEndpoint: + """Create a new Inference Endpoint. + + Args: + name (`str`): + The unique name for the new Inference Endpoint. + repository (`str`): + The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`). + framework (`str`): + The machine learning framework used for the model (e.g. `"custom"`). + accelerator (`str`): + The hardware accelerator to be used for inference (e.g. `"cpu"`). + instance_size (`str`): + The size or type of the instance to be used for hosting the model (e.g. `"x4"`). + instance_type (`str`): + The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`). + region (`str`): + The cloud region in which the Inference Endpoint will be created (e.g. `"us-east-1"`). + vendor (`str`): + The cloud provider or vendor where the Inference Endpoint will be hosted (e.g. `"aws"`). + account_id (`str`, *optional*): + The account ID used to link a VPC to a private Inference Endpoint (if applicable). + min_replica (`int`, *optional*): + The minimum number of replicas (instances) to keep running for the Inference Endpoint. To enable + scaling to zero, set this value to 0 and adjust `scale_to_zero_timeout` accordingly. Defaults to 1. + max_replica (`int`, *optional*): + The maximum number of replicas (instances) to scale to for the Inference Endpoint. Defaults to 1. + scale_to_zero_timeout (`int`, *optional*): + The duration in minutes before an inactive endpoint is scaled to zero, or no scaling to zero if + set to None and `min_replica` is not 0. 
Defaults to None. + revision (`str`, *optional*): + The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`). + task (`str`, *optional*): + The task on which to deploy the model (e.g. `"text-classification"`). + custom_image (`Dict`, *optional*): + A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an + Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples). + env (`Dict[str, str]`, *optional*): + Non-secret environment variables to inject in the container environment. + secrets (`Dict[str, str]`, *optional*): + Secret values to inject in the container environment. + type ([`InferenceEndpointType`], *optional*): + The type of the Inference Endpoint, which can be `"protected"` (default), `"public"` or `"private"`. + domain (`str`, *optional*): + The custom domain for the Inference Endpoint deployment. If set up, the Inference Endpoint will be available at this domain (e.g. `"my-new-domain.cool-website.woof"`). + path (`str`, *optional*): + The custom path to the deployed model; it should start with a `/` (e.g. `"/models/google-bert/bert-base-uncased"`). + cache_http_responses (`bool`, *optional*): + Whether to cache HTTP responses from the Inference Endpoint. Defaults to `False`. + tags (`List[str]`, *optional*): + A list of tags to associate with the Inference Endpoint. + namespace (`str`, *optional*): + The namespace where the Inference Endpoint will be created. Defaults to the current user's namespace. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`InferenceEndpoint`]: information about the new Inference Endpoint. + + Example: + ```python + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> endpoint = api.create_inference_endpoint( + ... "my-endpoint-name", + ... repository="gpt2", + ... framework="pytorch", + ... task="text-generation", + ... accelerator="cpu", + ... vendor="aws", + ... region="us-east-1", + ... type="protected", + ... instance_size="x2", + ... instance_type="intel-icl", + ... ) + >>> endpoint + InferenceEndpoint(name='my-endpoint-name', status="pending",...) + + # Run inference on the endpoint + >>> endpoint.client.text_generation(...) + "..." + ``` + + ```python + # Start an Inference Endpoint running Zephyr-7b-beta on TGI + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> endpoint = api.create_inference_endpoint( + ... "aws-zephyr-7b-beta-0486", + ... repository="HuggingFaceH4/zephyr-7b-beta", + ... framework="pytorch", + ... task="text-generation", + ... accelerator="gpu", + ... vendor="aws", + ... region="us-east-1", + ... type="protected", + ... instance_size="x1", + ... instance_type="nvidia-a10g", + ... env={ + ... "MAX_BATCH_PREFILL_TOKENS": "2048", + ... "MAX_INPUT_LENGTH": "1024", + ... "MAX_TOTAL_TOKENS": "1512", + ... "MODEL_ID": "/repository" + ... }, + ... custom_image={ + ... "health_route": "/health", + ... "url": "ghcr.io/huggingface/text-generation-inference:1.1.0", + ... }, + ... secrets={"MY_SECRET_KEY": "secret_value"}, + ... tags=["dev", "text-generation"], + ...
) + ``` + + ```python + # Start an Inference Endpoint running ProsusAI/finbert while scaling to zero in 15 minutes + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> endpoint = api.create_inference_endpoint( + ... "finbert-classifier", + ... repository="ProsusAI/finbert", + ... framework="pytorch", + ... task="text-classification", + ... min_replica=0, + ... scale_to_zero_timeout=15, + ... accelerator="cpu", + ... vendor="aws", + ... region="us-east-1", + ... type="protected", + ... instance_size="x2", + ... instance_type="intel-icl", + ... ) + >>> endpoint.wait(timeout=300) + # Run inference on the endpoint + >>> endpoint.client.text_classification(...) + TextClassificationOutputElement(label='positive', score=0.8983615040779114) + ``` + + """ + namespace = namespace or self._get_namespace(token=token) + + if custom_image is not None: + image = ( + custom_image + if next(iter(custom_image)) in constants.INFERENCE_ENDPOINT_IMAGE_KEYS + else {"custom": custom_image} + ) + else: + image = {"huggingface": {}} + + payload: Dict = { + "accountId": account_id, + "compute": { + "accelerator": accelerator, + "instanceSize": instance_size, + "instanceType": instance_type, + "scaling": { + "maxReplica": max_replica, + "minReplica": min_replica, + "scaleToZeroTimeout": scale_to_zero_timeout, + }, + }, + "model": { + "framework": framework, + "repository": repository, + "revision": revision, + "task": task, + "image": image, + }, + "name": name, + "provider": { + "region": region, + "vendor": vendor, + }, + "type": type, + } + if env: + payload["model"]["env"] = env + if secrets: + payload["model"]["secrets"] = secrets + if domain is not None or path is not None: + payload["route"] = {} + if domain is not None: + payload["route"]["domain"] = domain + if path is not None: + payload["route"]["path"] = path + if cache_http_responses is not None: + payload["cacheHttpResponses"] = cache_http_responses + if tags is not None: + payload["tags"] = tags + + response = get_session().post( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(response) + + return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) + + @experimental + @validate_hf_hub_args + def create_inference_endpoint_from_catalog( + self, + repo_id: str, + *, + name: Optional[str] = None, + token: Union[bool, str, None] = None, + namespace: Optional[str] = None, + ) -> InferenceEndpoint: + """Create a new Inference Endpoint from a model in the Hugging Face Inference Catalog. + + The goal of the Inference Catalog is to provide a curated list of models that are optimized for inference + and for which default configurations have been tested. See https://endpoints.huggingface.co/catalog for a list + of available models in the catalog. + + Args: + repo_id (`str`): + The ID of the model in the catalog to deploy as an Inference Endpoint. + name (`str`, *optional*): + The unique name for the new Inference Endpoint. If not provided, a random name will be generated. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + namespace (`str`, *optional*): + The namespace where the Inference Endpoint will be created. Defaults to the current user's namespace.
+ + Returns: + [`InferenceEndpoint`]: information about the new Inference Endpoint. + + > [!WARNING] + > `create_inference_endpoint_from_catalog` is experimental. Its API is subject to change in the future. Please provide feedback + > if you have any suggestions or requests. + """ + token = token or self.token or get_token() + payload: Dict = { + "namespace": namespace or self._get_namespace(token=token), + "repoId": repo_id, + } + if name is not None: + payload["endpointName"] = name + + response = get_session().post( + f"{constants.INFERENCE_CATALOG_ENDPOINT}/deploy", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(response) + data = response.json()["endpoint"] + return InferenceEndpoint.from_raw(data, namespace=data["name"], token=token) + + @experimental + @validate_hf_hub_args + def list_inference_catalog(self, *, token: Union[bool, str, None] = None) -> List[str]: + """List models available in the Hugging Face Inference Catalog. + + The goal of the Inference Catalog is to provide a curated list of models that are optimized for inference + and for which default configurations have been tested. See https://endpoints.huggingface.co/catalog for a list + of available models in the catalog. + + Use [`create_inference_endpoint_from_catalog`] to deploy a model from the catalog. + + Args: + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + + Returns: + List[`str`]: A list of model IDs available in the catalog. + > [!WARNING] + > `list_inference_catalog` is experimental. Its API is subject to change in the future. Please provide feedback + > if you have any suggestions or requests. + """ + response = get_session().get( + f"{constants.INFERENCE_CATALOG_ENDPOINT}/repo-list", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + return response.json()["models"] + + def get_inference_endpoint( + self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None + ) -> InferenceEndpoint: + """Get information about an Inference Endpoint. + + Args: + name (`str`): + The name of the Inference Endpoint to retrieve information about. + namespace (`str`, *optional*): + The namespace in which the Inference Endpoint is located. Defaults to the current user. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`InferenceEndpoint`]: information about the requested Inference Endpoint. + + Example: + ```python + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> endpoint = api.get_inference_endpoint("my-text-to-image") + >>> endpoint + InferenceEndpoint(name='my-text-to-image', ...) + + # Get status + >>> endpoint.status + 'running' + >>> endpoint.url + 'https://my-text-to-image.region.vendor.endpoints.huggingface.cloud' + + # Run inference + >>> endpoint.client.text_to_image(...) 
+ ``` + """ + namespace = namespace or self._get_namespace(token=token) + + response = get_session().get( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + + return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) + + def update_inference_endpoint( + self, + name: str, + *, + # Compute update + accelerator: Optional[str] = None, + instance_size: Optional[str] = None, + instance_type: Optional[str] = None, + min_replica: Optional[int] = None, + max_replica: Optional[int] = None, + scale_to_zero_timeout: Optional[int] = None, + # Model update + repository: Optional[str] = None, + framework: Optional[str] = None, + revision: Optional[str] = None, + task: Optional[str] = None, + custom_image: Optional[Dict] = None, + env: Optional[Dict[str, str]] = None, + secrets: Optional[Dict[str, str]] = None, + # Route update + domain: Optional[str] = None, + path: Optional[str] = None, + # Other + cache_http_responses: Optional[bool] = None, + tags: Optional[List[str]] = None, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> InferenceEndpoint: + """Update an Inference Endpoint. + + This method allows the update of either the compute configuration, the deployed model, the route, or any combination. + All arguments are optional but at least one must be provided. + + For convenience, you can also update an Inference Endpoint using [`InferenceEndpoint.update`]. + + Args: + name (`str`): + The name of the Inference Endpoint to update. + + accelerator (`str`, *optional*): + The hardware accelerator to be used for inference (e.g. `"cpu"`). + instance_size (`str`, *optional*): + The size or type of the instance to be used for hosting the model (e.g. `"x4"`). + instance_type (`str`, *optional*): + The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`). + min_replica (`int`, *optional*): + The minimum number of replicas (instances) to keep running for the Inference Endpoint. + max_replica (`int`, *optional*): + The maximum number of replicas (instances) to scale to for the Inference Endpoint. + scale_to_zero_timeout (`int`, *optional*): + The duration in minutes before an inactive endpoint is scaled to zero. + + repository (`str`, *optional*): + The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`). + framework (`str`, *optional*): + The machine learning framework used for the model (e.g. `"custom"`). + revision (`str`, *optional*): + The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`). + task (`str`, *optional*): + The task on which to deploy the model (e.g. `"text-classification"`). + custom_image (`Dict`, *optional*): + A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an + Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples). + env (`Dict[str, str]`, *optional*): + Non-secret environment variables to inject in the container environment + secrets (`Dict[str, str]`, *optional*): + Secret values to inject in the container environment. + + domain (`str`, *optional*): + The custom domain for the Inference Endpoint deployment, if setup the inference endpoint will be available at this domain (e.g. `"my-new-domain.cool-website.woof"`). + path (`str`, *optional*): + The custom path to the deployed model, should start with a `/` (e.g. 
`"/models/google-bert/bert-base-uncased"`). + + cache_http_responses (`bool`, *optional*): + Whether to cache HTTP responses from the Inference Endpoint. + tags (`List[str]`, *optional*): + A list of tags to associate with the Inference Endpoint. + + namespace (`str`, *optional*): + The namespace where the Inference Endpoint will be updated. Defaults to the current user's namespace. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`InferenceEndpoint`]: information about the updated Inference Endpoint. + """ + namespace = namespace or self._get_namespace(token=token) + + # Populate only the fields that are not None + payload: Dict = defaultdict(lambda: defaultdict(dict)) + if accelerator is not None: + payload["compute"]["accelerator"] = accelerator + if instance_size is not None: + payload["compute"]["instanceSize"] = instance_size + if instance_type is not None: + payload["compute"]["instanceType"] = instance_type + if max_replica is not None: + payload["compute"]["scaling"]["maxReplica"] = max_replica + if min_replica is not None: + payload["compute"]["scaling"]["minReplica"] = min_replica + if scale_to_zero_timeout is not None: + payload["compute"]["scaling"]["scaleToZeroTimeout"] = scale_to_zero_timeout + if repository is not None: + payload["model"]["repository"] = repository + if framework is not None: + payload["model"]["framework"] = framework + if revision is not None: + payload["model"]["revision"] = revision + if task is not None: + payload["model"]["task"] = task + if custom_image is not None: + payload["model"]["image"] = {"custom": custom_image} + if env is not None: + payload["model"]["env"] = env + if secrets is not None: + payload["model"]["secrets"] = secrets + if domain is not None: + payload["route"]["domain"] = domain + if path is not None: + payload["route"]["path"] = path + if cache_http_responses is not None: + payload["cacheHttpResponses"] = cache_http_responses + if tags is not None: + payload["tags"] = tags + + response = get_session().put( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}", + headers=self._build_hf_headers(token=token), + json=payload, + ) + hf_raise_for_status(response) + + return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) + + def delete_inference_endpoint( + self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None + ) -> None: + """Delete an Inference Endpoint. + + This operation is not reversible. If you don't want to be charged for an Inference Endpoint, it is preferable + to pause it with [`pause_inference_endpoint`] or scale it to zero with [`scale_to_zero_inference_endpoint`]. + + For convenience, you can also delete an Inference Endpoint using [`InferenceEndpoint.delete`]. + + Args: + name (`str`): + The name of the Inference Endpoint to delete. + namespace (`str`, *optional*): + The namespace in which the Inference Endpoint is located. Defaults to the current user. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. 
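+ + Example (an illustrative sketch; `"my-endpoint-name"` is a placeholder): + ```python + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> api.delete_inference_endpoint("my-endpoint-name") + ```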
+ """ + namespace = namespace or self._get_namespace(token=token) + response = get_session().delete( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + + def pause_inference_endpoint( + self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None + ) -> InferenceEndpoint: + """Pause an Inference Endpoint. + + A paused Inference Endpoint will not be charged. It can be resumed at any time using [`resume_inference_endpoint`]. + This is different from scaling the Inference Endpoint to zero with [`scale_to_zero_inference_endpoint`]: an + endpoint scaled to zero is automatically restarted when a request is made to it. + + For convenience, you can also pause an Inference Endpoint using [`InferenceEndpoint.pause`]. + + Args: + name (`str`): + The name of the Inference Endpoint to pause. + namespace (`str`, *optional*): + The namespace in which the Inference Endpoint is located. Defaults to the current user. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`InferenceEndpoint`]: information about the paused Inference Endpoint. + """ + namespace = namespace or self._get_namespace(token=token) + + response = get_session().post( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/pause", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + + return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) + + def resume_inference_endpoint( + self, + name: str, + *, + namespace: Optional[str] = None, + running_ok: bool = True, + token: Union[bool, str, None] = None, + ) -> InferenceEndpoint: + """Resume an Inference Endpoint. + + For convenience, you can also resume an Inference Endpoint using [`InferenceEndpoint.resume`]. + + Args: + name (`str`): + The name of the Inference Endpoint to resume. + namespace (`str`, *optional*): + The namespace in which the Inference Endpoint is located. Defaults to the current user. + running_ok (`bool`, *optional*): + If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults to + `True`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`InferenceEndpoint`]: information about the resumed Inference Endpoint.
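+ + Example (an illustrative sketch; `"my-endpoint-name"` and the output are placeholders): + ```python + >>> from huggingface_hub import HfApi + >>> api = HfApi() + >>> endpoint = api.resume_inference_endpoint("my-endpoint-name") + >>> endpoint.wait() # block until the endpoint is ready + >>> endpoint.status + 'running' + ```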
+ """ + namespace = namespace or self._get_namespace(token=token) + + response = get_session().post( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/resume", + headers=self._build_hf_headers(token=token), + ) + try: + hf_raise_for_status(response) + except HfHubHTTPError as error: + # If already running (and it's ok), then fetch current status and return + if running_ok and error.response.status_code == 400 and "already running" in error.response.text: + return self.get_inference_endpoint(name, namespace=namespace, token=token) + # Otherwise, raise the error + raise + + return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) + + def scale_to_zero_inference_endpoint( + self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None + ) -> InferenceEndpoint: + """Scale Inference Endpoint to zero. + + An Inference Endpoint scaled to zero will not be charged. It will be resumed on the next request to it, with a + cold start delay. This is different from pausing the Inference Endpoint with [`pause_inference_endpoint`], which + would require a manual resume with [`resume_inference_endpoint`]. + + For convenience, you can also scale an Inference Endpoint to zero using [`InferenceEndpoint.scale_to_zero`]. + + Args: + name (`str`): + The name of the Inference Endpoint to scale to zero. + namespace (`str`, *optional*): + The namespace in which the Inference Endpoint is located. Defaults to the current user. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`InferenceEndpoint`]: information about the scaled-to-zero Inference Endpoint. + """ + namespace = namespace or self._get_namespace(token=token) + + response = get_session().post( + f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/scale-to-zero", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + + return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) + + def _get_namespace(self, token: Union[bool, str, None] = None) -> str: + """Get the default namespace for the current user.""" + me = self.whoami(token=token) + if me["type"] == "user": + return me["name"] + else: + raise ValueError( + "Cannot determine default namespace. You must provide a 'namespace' as input or be logged in as a" + " user." + ) + + ######################## + # Collection Endpoints # + ######################## + @validate_hf_hub_args + def list_collections( + self, + *, + owner: Union[List[str], str, None] = None, + item: Union[List[str], str, None] = None, + sort: Optional[Literal["lastModified", "trending", "upvotes"]] = None, + limit: Optional[int] = None, + token: Union[bool, str, None] = None, + ) -> Iterable[Collection]: + """List collections on the Hugging Face Hub, given some filters. + + > [!WARNING] + > When listing collections, the item list per collection is truncated to 4 items maximum. To retrieve all items + > from a collection, you must use [`get_collection`]. + + Args: + owner (`List[str]` or `str`, *optional*): + Filter by owner's username. + item (`List[str]` or `str`, *optional*): + Filter collections containing a particular item. Example: `"models/teknium/OpenHermes-2.5-Mistral-7B"`, `"datasets/squad"` or `"papers/2311.12983"`.
+ sort (`Literal["lastModified", "trending", "upvotes"]`, *optional*): + Sort collections by last modified, trending or upvotes. + limit (`int`, *optional*): + Maximum number of collections to be returned. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + `Iterable[Collection]`: an iterable of [`Collection`] objects. + """ + # Construct the API endpoint + path = f"{self.endpoint}/api/collections" + headers = self._build_hf_headers(token=token) + params: Dict = {} + if owner is not None: + params.update({"owner": owner}) + if item is not None: + params.update({"item": item}) + if sort is not None: + params.update({"sort": sort}) + if limit is not None: + params.update({"limit": limit}) + + # Paginate over the results until limit is reached + items = paginate(path, headers=headers, params=params) + if limit is not None: + items = islice(items, limit) # Do not iterate over all pages + + # Parse as Collection and return + for position, collection_data in enumerate(items): + yield Collection(position=position, **collection_data) + + def get_collection(self, collection_slug: str, *, token: Union[bool, str, None] = None) -> Collection: + """Gets information about a Collection on the Hub. + + Args: + collection_slug (`str`): + Slug of the collection of the Hub. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: [`Collection`] + + Example: + + ```py + >>> from huggingface_hub import get_collection + >>> collection = get_collection("TheBloke/recent-models-64f9a55bb3115b4f513ec026") + >>> collection.title + 'Recent models' + >>> len(collection.items) + 37 + >>> collection.items[0] + CollectionItem( + item_object_id='651446103cd773a050bf64c2', + item_id='TheBloke/U-Amethyst-20B-AWQ', + item_type='model', + position=88, + note=None + ) + ``` + """ + r = get_session().get( + f"{self.endpoint}/api/collections/{collection_slug}", headers=self._build_hf_headers(token=token) + ) + hf_raise_for_status(r) + return Collection(**{**r.json(), "endpoint": self.endpoint}) + + def create_collection( + self, + title: str, + *, + namespace: Optional[str] = None, + description: Optional[str] = None, + private: bool = False, + exists_ok: bool = False, + token: Union[bool, str, None] = None, + ) -> Collection: + """Create a new Collection on the Hub. + + Args: + title (`str`): + Title of the collection to create. Example: `"Recent models"`. + namespace (`str`, *optional*): + Namespace of the collection to create (username or org). Will default to the owner name. + description (`str`, *optional*): + Description of the collection to create. + private (`bool`, *optional*): + Whether the collection should be private or not. Defaults to `False` (i.e. public collection). + exists_ok (`bool`, *optional*): + If `True`, do not raise an error if collection already exists. + token (`bool` or `str`, *optional*): + A valid user access token (string). 
Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: [`Collection`] + + Example: + + ```py + >>> from huggingface_hub import create_collection + >>> collection = create_collection( + ... title="ICCV 2023", + ... description="Portfolio of models, papers and demos I presented at ICCV 2023", + ... ) + >>> collection.slug + "username/iccv-2023-64f9a55bb3115b4f513ec026" + ``` + """ + if namespace is None: + namespace = self.whoami(token)["name"] + + payload = { + "title": title, + "namespace": namespace, + "private": private, + } + if description is not None: + payload["description"] = description + + r = get_session().post( + f"{self.endpoint}/api/collections", headers=self._build_hf_headers(token=token), json=payload + ) + try: + hf_raise_for_status(r) + except HTTPError as err: + if exists_ok and err.response.status_code == 409: + # Collection already exists and `exists_ok=True` + slug = r.json()["slug"] + return self.get_collection(slug, token=token) + else: + raise + return Collection(**{**r.json(), "endpoint": self.endpoint}) + + def update_collection_metadata( + self, + collection_slug: str, + *, + title: Optional[str] = None, + description: Optional[str] = None, + position: Optional[int] = None, + private: Optional[bool] = None, + theme: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> Collection: + """Update metadata of a collection on the Hub. + + All arguments are optional. Only provided metadata will be updated. + + Args: + collection_slug (`str`): + Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. + title (`str`, *optional*): + Title of the collection to update. + description (`str`, *optional*): + Description of the collection to update. + position (`int`, *optional*): + New position of the collection in the list of collections of the user. + private (`bool`, *optional*): + Whether the collection should be private or not. + theme (`str`, *optional*): + Theme of the collection on the Hub. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: [`Collection`] + + Example: + + ```py + >>> from huggingface_hub import update_collection_metadata + >>> collection = update_collection_metadata( + ... collection_slug="username/iccv-2023-64f9a55bb3115b4f513ec026", + ... title="ICCV Oct. 2023", + ... description="Portfolio of models, datasets, papers and demos I presented at ICCV Oct. 2023", + ... private=False, + ... theme="pink", + ...
) + >>> collection.slug + "username/iccv-oct-2023-64f9a55bb3115b4f513ec026" + # ^collection slug got updated but not the trailing ID + ``` + """ + payload = { + "position": position, + "private": private, + "theme": theme, + "title": title, + "description": description, + } + r = get_session().patch( + f"{self.endpoint}/api/collections/{collection_slug}", + headers=self._build_hf_headers(token=token), + # Only send not-none values to the API + json={key: value for key, value in payload.items() if value is not None}, + ) + hf_raise_for_status(r) + return Collection(**{**r.json()["data"], "endpoint": self.endpoint}) + + def delete_collection( + self, collection_slug: str, *, missing_ok: bool = False, token: Union[bool, str, None] = None + ) -> None: + """Delete a collection on the Hub. + + Args: + collection_slug (`str`): + Slug of the collection to delete. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. + missing_ok (`bool`, *optional*): + If `True`, do not raise an error if collection doesn't exist. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Example: + + ```py + >>> from huggingface_hub import delete_collection + >>> collection = delete_collection("username/useless-collection-64f9a55bb3115b4f513ec026", missing_ok=True) + ``` + + > [!WARNING] + > This is an irreversible action. A deleted collection cannot be restored. + """ + r = get_session().delete( + f"{self.endpoint}/api/collections/{collection_slug}", headers=self._build_hf_headers(token=token) + ) + try: + hf_raise_for_status(r) + except HTTPError as err: + if missing_ok and err.response.status_code == 404: + # Collection doesn't exist and `missing_ok=True` + return + else: + raise + + def add_collection_item( + self, + collection_slug: str, + item_id: str, + item_type: CollectionItemType_T, + *, + note: Optional[str] = None, + exists_ok: bool = False, + token: Union[bool, str, None] = None, + ) -> Collection: + """Add an item to a collection on the Hub. + + Args: + collection_slug (`str`): + Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. + item_id (`str`): + ID of the item to add to the collection. It can be the ID of a repo on the Hub (e.g. `"facebook/bart-large-mnli"`) + or a paper id (e.g. `"2307.09288"`). + item_type (`str`): + Type of the item to add. Can be one of `"model"`, `"dataset"`, `"space"` or `"paper"`. + note (`str`, *optional*): + A note to attach to the item in the collection. The maximum size for a note is 500 characters. + exists_ok (`bool`, *optional*): + If `True`, do not raise an error if item already exists. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: [`Collection`] + + Raises: + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): + HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` + or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+ [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): + HTTP 404 if the item you try to add to the collection does not exist on the Hub. + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): + HTTP 409 if the item you try to add to the collection is already in the collection (and exists_ok=False) + + Example: + + ```py + >>> from huggingface_hub import add_collection_item + >>> collection = add_collection_item( + ... collection_slug="davanstrien/climate-64f99dc2a5067f6b65531bab", + ... item_id="pierre-loic/climate-news-articles", + ... item_type="dataset" + ... ) + >>> collection.items[-1].item_id + "pierre-loic/climate-news-articles" + # ^item got added to the collection at the last position + + # Add item with a note + >>> add_collection_item( + ... collection_slug="davanstrien/climate-64f99dc2a5067f6b65531bab", + ... item_id="datasets/climate_fever", + ... item_type="dataset", + ... note="This dataset adopts the FEVER methodology that consists of 1,535 real-world claims regarding climate-change collected on the internet." + ... ) + (...) + ``` + """ + payload: Dict[str, Any] = {"item": {"id": item_id, "type": item_type}} + if note is not None: + payload["note"] = note + r = get_session().post( + f"{self.endpoint}/api/collections/{collection_slug}/items", + headers=self._build_hf_headers(token=token), + json=payload, + ) + try: + hf_raise_for_status(r) + except HTTPError as err: + if exists_ok and err.response.status_code == 409: + # Item already exists and `exists_ok=True` + return self.get_collection(collection_slug, token=token) + else: + raise + return Collection(**{**r.json(), "endpoint": self.endpoint}) + + def update_collection_item( + self, + collection_slug: str, + item_object_id: str, + *, + note: Optional[str] = None, + position: Optional[int] = None, + token: Union[bool, str, None] = None, + ) -> None: + """Update an item in a collection. + + Args: + collection_slug (`str`): + Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. + item_object_id (`str`): + ID of the item in the collection. This is not the id of the item on the Hub (repo_id or paper id). + It must be retrieved from a [`CollectionItem`] object. Example: `collection.items[0].item_object_id`. + note (`str`, *optional*): + A note to attach to the item in the collection. The maximum size for a note is 500 characters. + position (`int`, *optional*): + New position of the item in the collection. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved + token, which is the recommended method for authentication (see + https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Example: + + ```py + >>> from huggingface_hub import get_collection, update_collection_item + + # Get collection first + >>> collection = get_collection("TheBloke/recent-models-64f9a55bb3115b4f513ec026") + + # Update item based on its ID (add note + update position) + >>> update_collection_item( + ... collection_slug="TheBloke/recent-models-64f9a55bb3115b4f513ec026", + ... item_object_id=collection.items[-1].item_object_id, + ... note="Newly updated model!", + ... position=0, + ...
+        ... )
+        ```
+        """
+        payload = {"position": position, "note": note}
+        r = get_session().patch(
+            f"{self.endpoint}/api/collections/{collection_slug}/items/{item_object_id}",
+            headers=self._build_hf_headers(token=token),
+            # Only send not-none values to the API
+            json={key: value for key, value in payload.items() if value is not None},
+        )
+        hf_raise_for_status(r)
+
+    def delete_collection_item(
+        self,
+        collection_slug: str,
+        item_object_id: str,
+        *,
+        missing_ok: bool = False,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        """Delete an item from a collection.
+
+        Args:
+            collection_slug (`str`):
+                Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`.
+            item_object_id (`str`):
+                ID of the item in the collection. This is not the id of the item on the Hub (repo_id or paper id).
+                It must be retrieved from a [`CollectionItem`] object. Example: `collection.items[0].item_object_id`.
+            missing_ok (`bool`, *optional*):
+                If `True`, do not raise an error if the item doesn't exist.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Example:
+
+        ```py
+        >>> from huggingface_hub import get_collection, delete_collection_item
+
+        # Get collection first
+        >>> collection = get_collection("TheBloke/recent-models-64f9a55bb3115b4f513ec026")
+
+        # Delete item based on its ID
+        >>> delete_collection_item(
+        ...     collection_slug="TheBloke/recent-models-64f9a55bb3115b4f513ec026",
+        ...     item_object_id=collection.items[-1].item_object_id,
+        ... )
+        ```
+        """
+        r = get_session().delete(
+            f"{self.endpoint}/api/collections/{collection_slug}/items/{item_object_id}",
+            headers=self._build_hf_headers(token=token),
+        )
+        try:
+            hf_raise_for_status(r)
+        except HTTPError as err:
+            if missing_ok and err.response.status_code == 404:
+                # Item already deleted and `missing_ok=True`
+                return
+            else:
+                raise
+
+    ##########################
+    # Manage access requests #
+    ##########################
+
+    @validate_hf_hub_args
+    def list_pending_access_requests(
+        self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> List[AccessRequest]:
+        """
+        Get pending access requests for a given gated repo.
+
+        A pending request means the user has requested access to the repo but the request has not been processed yet.
+        If the approval mode is automatic, this list should be empty. Pending requests can be accepted or rejected
+        using [`accept_access_request`] and [`reject_access_request`].
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to get access requests for.
+            repo_type (`str`, *optional*):
+                The type of the repo to get access requests for. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each request contains a `username`, `email`,
+            `status` and `timestamp` attribute.
+            If the gated repo has a custom form, the `fields` attribute will
+            be populated with user's answers.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import list_pending_access_requests, accept_access_request
+
+        # List pending requests
+        >>> requests = list_pending_access_requests("meta-llama/Llama-2-7b")
+        >>> len(requests)
+        411
+        >>> requests[0]
+        AccessRequest(
+            username='clem',
+            fullname='Clem 🤗',
+            email='***',
+            timestamp=datetime.datetime(2023, 11, 23, 18, 4, 53, 828000, tzinfo=datetime.timezone.utc),
+            status='pending',
+            fields=None,
+        )
+
+        # Accept Clem's request
+        >>> accept_access_request("meta-llama/Llama-2-7b", "clem")
+        ```
+        """
+        return self._list_access_requests(repo_id, "pending", repo_type=repo_type, token=token)
+
+    @validate_hf_hub_args
+    def list_accepted_access_requests(
+        self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> List[AccessRequest]:
+        """
+        Get accepted access requests for a given gated repo.
+
+        An accepted request means the user has requested access to the repo and the request has been accepted. The user
+        can download any file of the repo. If the approval mode is automatic, this list should contain all requests
+        by default. Accepted requests can be cancelled or rejected at any time using [`cancel_access_request`] and
+        [`reject_access_request`]. A cancelled request will go back to the pending list while a rejected request will
+        go to the rejected list. In both cases, the user will lose access to the repo.
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to get access requests for.
+            repo_type (`str`, *optional*):
+                The type of the repo to get access requests for. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each request contains a `username`, `email`,
+            `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
+            be populated with user's answers.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import list_accepted_access_requests
+
+        >>> requests = list_accepted_access_requests("meta-llama/Llama-2-7b")
+        >>> len(requests)
+        411
+        >>> requests[0]
+        AccessRequest(
+            username='clem',
+            fullname='Clem 🤗',
+            email='***',
+            timestamp=datetime.datetime(2023, 11, 23, 18, 4, 53, 828000, tzinfo=datetime.timezone.utc),
+            status='accepted',
+            fields=None,
+        )
+        ```
+        """
+        return self._list_access_requests(repo_id, "accepted", repo_type=repo_type, token=token)
+
+    @validate_hf_hub_args
+    def list_rejected_access_requests(
+        self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> List[AccessRequest]:
+        """
+        Get rejected access requests for a given gated repo.
+
+        A rejected request means the user has requested access to the repo and the request has been explicitly rejected
+        by a repo owner (either you or another user from your organization). The user cannot download any file of the
+        repo. Rejected requests can be accepted or cancelled at any time using [`accept_access_request`] and
+        [`cancel_access_request`]. A cancelled request will go back to the pending list while an accepted request will
+        go to the accepted list.
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to get access requests for.
+            repo_type (`str`, *optional*):
+                The type of the repo to get access requests for. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each request contains a `username`, `email`,
+            `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will
+            be populated with user's answers.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+
+        Example:
+        ```py
+        >>> from huggingface_hub import list_rejected_access_requests
+
+        >>> requests = list_rejected_access_requests("meta-llama/Llama-2-7b")
+        >>> len(requests)
+        411
+        >>> requests[0]
+        AccessRequest(
+            username='clem',
+            fullname='Clem 🤗',
+            email='***',
+            timestamp=datetime.datetime(2023, 11, 23, 18, 4, 53, 828000, tzinfo=datetime.timezone.utc),
+            status='rejected',
+            fields=None,
+        )
+        ```
+        """
+        return self._list_access_requests(repo_id, "rejected", repo_type=repo_type, token=token)
+
+    def _list_access_requests(
+        self,
+        repo_id: str,
+        status: Literal["accepted", "rejected", "pending"],
+        repo_type: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> List[AccessRequest]:
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+
+        response = get_session().get(
+            f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/{status}",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        return [
+            AccessRequest(
+                username=request["user"]["user"],
+                fullname=request["user"]["fullname"],
+                email=request["user"].get("email"),
+                status=request["status"],
+                timestamp=parse_datetime(request["timestamp"]),
+                fields=request.get("fields"),  # only if custom fields in form
+            )
+            for request in response.json()
+        ]
+
+    @validate_hf_hub_args
+    def cancel_access_request(
+        self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> None:
+        """
+        Cancel an access request from a user for a given gated repo.
+
+        A cancelled request will go back to the pending list and the user will lose access to the repo.
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to cancel access request for.
+            user (`str`):
+                The username of the user whose access request should be cancelled.
+            repo_type (`str`, *optional*):
+                The type of the repo to cancel access request for. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user does not exist on the Hub.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user access request cannot be found.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user access request is already in the pending list.
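+
+        Example (illustrative sketch; it assumes you manage the gated repo and reuses the
+        repo and username from the examples above, with `clem` currently in the accepted list):
+
+        ```py
+        >>> from huggingface_hub import cancel_access_request
+
+        # Move a previously accepted request back to the pending list
+        >>> cancel_access_request("meta-llama/Llama-2-7b", "clem")
+        ```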
+        """
+        self._handle_access_request(repo_id, user, "pending", repo_type=repo_type, token=token)
+
+    @validate_hf_hub_args
+    def accept_access_request(
+        self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> None:
+        """
+        Accept an access request from a user for a given gated repo.
+
+        Once the request is accepted, the user will be able to download any file of the repo and access the community
+        tab. If the approval mode is automatic, you don't have to accept requests manually. An accepted request can be
+        cancelled or rejected at any time using [`cancel_access_request`] and [`reject_access_request`].
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to accept access request for.
+            user (`str`):
+                The username of the user whose access request should be accepted.
+            repo_type (`str`, *optional*):
+                The type of the repo to accept access request for. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user does not exist on the Hub.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user access request cannot be found.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user access request is already in the accepted list.
+        """
+        self._handle_access_request(repo_id, user, "accepted", repo_type=repo_type, token=token)
+
+    @validate_hf_hub_args
+    def reject_access_request(
+        self,
+        repo_id: str,
+        user: str,
+        *,
+        repo_type: Optional[str] = None,
+        rejection_reason: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        """
+        Reject an access request from a user for a given gated repo.
+
+        A rejected request will go to the rejected list. The user cannot download any file of the repo. Rejected
+        requests can be accepted or cancelled at any time using [`accept_access_request`] and [`cancel_access_request`].
+        A cancelled request will go back to the pending list while an accepted request will go to the accepted list.
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to reject access request for.
+            user (`str`):
+                The username of the user whose access request should be rejected.
+            repo_type (`str`, *optional*):
+                The type of the repo to reject access request for. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            rejection_reason (`str`, *optional*):
+                Optional rejection reason that will be visible to the user (max 200 characters).
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user does not exist on the Hub.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user access request cannot be found.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user access request is already in the rejected list.
+        """
+        self._handle_access_request(
+            repo_id, user, "rejected", repo_type=repo_type, rejection_reason=rejection_reason, token=token
+        )
+
+    @validate_hf_hub_args
+    def _handle_access_request(
+        self,
+        repo_id: str,
+        user: str,
+        status: Literal["accepted", "rejected", "pending"],
+        repo_type: Optional[str] = None,
+        rejection_reason: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> None:
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+
+        payload = {"user": user, "status": status}
+
+        if rejection_reason is not None:
+            if status != "rejected":
+                raise ValueError("`rejection_reason` can only be passed when rejecting an access request.")
+            payload["rejectionReason"] = rejection_reason
+
+        response = get_session().post(
+            f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/handle",
+            headers=self._build_hf_headers(token=token),
+            json=payload,
+        )
+        hf_raise_for_status(response)
+
+    @validate_hf_hub_args
+    def grant_access(
+        self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> None:
+        """
+        Grant access to a user for a given gated repo.
+
+        Granting access does not require the user to send an access request themselves. The user is automatically
+        added to the accepted list, meaning they can download the files. You can revoke the granted access at any time
+        using [`cancel_access_request`] or [`reject_access_request`].
+
+        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.
+
+        Args:
+            repo_id (`str`):
+                The id of the repo to grant access to.
+            user (`str`):
+                The username of the user to grant access.
+            repo_type (`str`, *optional*):
+                The type of the repo to grant access to. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the repo is not gated.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 400 if the user already has access to the repo.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write`
+                or `admin` role in the organization the repo belongs to or if you passed a `read` token.
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 if the user does not exist on the Hub.
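+
+        Example (illustrative sketch; reuses the gated repo and username from the examples above):
+
+        ```py
+        >>> from huggingface_hub import grant_access
+
+        # Grant access without waiting for the user to send a request
+        >>> grant_access("meta-llama/Llama-2-7b", "clem")
+        ```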
+ """ + if repo_type not in constants.REPO_TYPES: + raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") + if repo_type is None: + repo_type = constants.REPO_TYPE_MODEL + + response = get_session().post( + f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/grant", + headers=self._build_hf_headers(token=token), + json={"user": user}, + ) + hf_raise_for_status(response) + return response.json() + + ################### + # Manage webhooks # + ################### + + @validate_hf_hub_args + def get_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo: + """Get a webhook by its id. + + Args: + webhook_id (`str`): + The unique identifier of the webhook to get. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved token, which is the recommended + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`WebhookInfo`]: + Info about the webhook. + + Example: + ```python + >>> from huggingface_hub import get_webhook + >>> webhook = get_webhook("654bbbc16f2ec14d77f109cc") + >>> print(webhook) + WebhookInfo( + id="654bbbc16f2ec14d77f109cc", + job=None, + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", + secret="my-secret", + domains=["repo", "discussion"], + disabled=False, + ) + ``` + """ + response = get_session().get( + f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + webhook_data = response.json()["webhook"] + + watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]] + + webhook = WebhookInfo( + id=webhook_data["id"], + url=webhook_data.get("url"), + job=JobSpec(**webhook_data["job"]) if webhook_data.get("job") else None, + watched=watched_items, + domains=webhook_data["domains"], + secret=webhook_data.get("secret"), + disabled=webhook_data["disabled"], + ) + + return webhook + + @validate_hf_hub_args + def list_webhooks(self, *, token: Union[bool, str, None] = None) -> List[WebhookInfo]: + """List all configured webhooks. + + Args: + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved token, which is the recommended + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + `List[WebhookInfo]`: + List of webhook info objects. 
+ + Example: + ```python + >>> from huggingface_hub import list_webhooks + >>> webhooks = list_webhooks() + >>> len(webhooks) + 2 + >>> webhooks[0] + WebhookInfo( + id="654bbbc16f2ec14d77f109cc", + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", + secret="my-secret", + domains=["repo", "discussion"], + disabled=False, + ) + ``` + """ + response = get_session().get( + f"{constants.ENDPOINT}/api/settings/webhooks", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + webhooks_data = response.json() + + return [ + WebhookInfo( + id=webhook["id"], + url=webhook.get("url"), + job=JobSpec(**webhook["job"]) if webhook.get("job") else None, + watched=[WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook["watched"]], + domains=webhook["domains"], + secret=webhook.get("secret"), + disabled=webhook["disabled"], + ) + for webhook in webhooks_data + ] + + @validate_hf_hub_args + def create_webhook( + self, + *, + url: Optional[str] = None, + job_id: Optional[str] = None, + watched: List[Union[Dict, WebhookWatchedItem]], + domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None, + secret: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> WebhookInfo: + """Create a new webhook. + + The webhook can either send a payload to a URL, or trigger a Job to run on Hugging Face infrastructure. + This function should be called with one of `url` or `job_id`, but not both. + + Args: + url (`str`): + URL to send the payload to. + job_id (`str`): + ID of the source Job to trigger with the webhook payload in the environment variable WEBHOOK_PAYLOAD. + Additional environment variables are available for convenience: WEBHOOK_REPO_ID, WEBHOOK_REPO_TYPE and WEBHOOK_SECRET. + watched (`List[WebhookWatchedItem]`): + List of [`WebhookWatchedItem`] to be watched by the webhook. It can be users, orgs, models, datasets or spaces. + Watched items can also be provided as plain dictionaries. + domains (`List[Literal["repo", "discussion"]]`, optional): + List of domains to watch. It can be "repo", "discussion" or both. + secret (`str`, optional): + A secret to sign the payload with. + token (`bool` or `str`, *optional*): + A valid user access token (string). Defaults to the locally saved token, which is the recommended + method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). + To disable authentication, pass `False`. + + Returns: + [`WebhookInfo`]: + Info about the newly created webhook. + + Example: + + Create a webhook that sends a payload to a URL + ```python + >>> from huggingface_hub import create_webhook + >>> payload = create_webhook( + ... watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}], + ... url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", + ... domains=["repo", "discussion"], + ... secret="my-secret", + ... ) + >>> print(payload) + WebhookInfo( + id="654bbbc16f2ec14d77f109cc", + url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", + job=None, + watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], + domains=["repo", "discussion"], + secret="my-secret", + disabled=False, + ) + ``` + + Run a Job and then create a webhook that triggers this Job + ```python + >>> from huggingface_hub import create_webhook, run_job + >>> job = run_job( + ... 
image="ubuntu",
+        ...     command=["bash", "-c", r"echo An event occurred in $WEBHOOK_REPO_ID: $WEBHOOK_PAYLOAD"],
+        ... )
+        >>> payload = create_webhook(
+        ...     watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}],
+        ...     job_id=job.id,
+        ...     domains=["repo", "discussion"],
+        ...     secret="my-secret",
+        ... )
+        >>> print(payload)
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            url=None,
+            job=JobSpec(
+                docker_image='ubuntu',
+                space_id=None,
+                command=['bash', '-c', 'echo An event occurred in $WEBHOOK_REPO_ID: $WEBHOOK_PAYLOAD'],
+                arguments=[],
+                environment={},
+                secrets=[],
+                flavor='cpu-basic',
+                timeout=None,
+                tags=None,
+                arch=None
+            ),
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo", "discussion"],
+            secret="my-secret",
+            disabled=False,
+        )
+        ```
+        """
+        watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
+
+        post_webhooks_json = {"watched": watched_dicts, "domains": domains, "secret": secret}
+        if url is not None and job_id is not None:
+            raise ValueError("Set `url` or `job_id` but not both.")
+        elif url is not None:
+            post_webhooks_json["url"] = url
+        elif job_id is not None:
+            post_webhooks_json["jobSourceId"] = job_id
+        else:
+            raise ValueError("Missing argument for webhook: `url` or `job_id`.")
+
+        response = get_session().post(
+            f"{constants.ENDPOINT}/api/settings/webhooks",
+            json=post_webhooks_json,
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data.get("url"),
+            job=JobSpec(**webhook_data["job"]) if webhook_data.get("job") else None,
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def update_webhook(
+        self,
+        webhook_id: str,
+        *,
+        url: Optional[str] = None,
+        watched: Optional[List[Union[Dict, WebhookWatchedItem]]] = None,
+        domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None,
+        secret: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> WebhookInfo:
+        """Update an existing webhook.
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to be updated.
+            url (`str`, optional):
+                The URL to which the payload will be sent.
+            watched (`List[WebhookWatchedItem]`, optional):
+                List of items to watch. It can be users, orgs, models, datasets, or spaces.
+                Refer to [`WebhookWatchedItem`] for more details. Watched items can also be provided as plain dictionaries.
+            domains (`List[Literal["repo", "discussion"]]`, optional):
+                The domains to watch. This can include "repo", "discussion", or both.
+            secret (`str`, optional):
+                A secret to sign the payload with, providing an additional layer of security.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the updated webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import update_webhook
+        >>> updated_payload = update_webhook(
+        ...     webhook_id="654bbbc16f2ec14d77f109cc",
+        ...     url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+        ...     watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}],
+        ...     domains=["repo"],
+        ...     secret="my-secret",
+        ... )
+        >>> print(updated_payload)
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            job=None,
+            url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo"],
+            secret="my-secret",
+            disabled=False,
+        )
+        ```
+        """
+        if watched is None:
+            watched = []
+        watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched]
+
+        response = get_session().post(
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret},
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data.get("url"),
+            job=JobSpec(**webhook_data["job"]) if webhook_data.get("job") else None,
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def enable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
+        """Enable a webhook (makes it "active").
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to enable.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the enabled webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import enable_webhook
+        >>> enabled_webhook = enable_webhook("654bbbc16f2ec14d77f109cc")
+        >>> enabled_webhook
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            job=None,
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo", "discussion"],
+            secret="my-secret",
+            disabled=False,
+        )
+        ```
+        """
+        response = get_session().post(
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}/enable",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data.get("url"),
+            job=JobSpec(**webhook_data["job"]) if webhook_data.get("job") else None,
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def disable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo:
+        """Disable a webhook (makes it "disabled").
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to disable.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            [`WebhookInfo`]:
+                Info about the disabled webhook.
+
+        Example:
+        ```python
+        >>> from huggingface_hub import disable_webhook
+        >>> disabled_webhook = disable_webhook("654bbbc16f2ec14d77f109cc")
+        >>> disabled_webhook
+        WebhookInfo(
+            id="654bbbc16f2ec14d77f109cc",
+            url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548",
+            job=None,
+            watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")],
+            domains=["repo", "discussion"],
+            secret="my-secret",
+            disabled=True,
+        )
+        ```
+        """
+        response = get_session().post(
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}/disable",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        webhook_data = response.json()["webhook"]
+
+        watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]]
+
+        webhook = WebhookInfo(
+            id=webhook_data["id"],
+            url=webhook_data.get("url"),
+            job=JobSpec(**webhook_data["job"]) if webhook_data.get("job") else None,
+            watched=watched_items,
+            domains=webhook_data["domains"],
+            secret=webhook_data.get("secret"),
+            disabled=webhook_data["disabled"],
+        )
+
+        return webhook
+
+    @validate_hf_hub_args
+    def delete_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> None:
+        """Delete a webhook.
+
+        Args:
+            webhook_id (`str`):
+                The unique identifier of the webhook to delete.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended
+                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `None`
+
+        Example:
+        ```python
+        >>> from huggingface_hub import delete_webhook
+        >>> delete_webhook("654bbbc16f2ec14d77f109cc")
+        ```
+        """
+        response = get_session().delete(
+            f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+
+    #############
+    # Internals #
+    #############
+
+    def _build_hf_headers(
+        self,
+        token: Union[bool, str, None] = None,
+        library_name: Optional[str] = None,
+        library_version: Optional[str] = None,
+        user_agent: Union[Dict, str, None] = None,
+    ) -> Dict[str, str]:
+        """
+        Alias for [`build_hf_headers`] that uses the token from [`HfApi`] client
+        when `token` is not provided.
+        """
+        if token is None:
+            # Cannot do `token = token or self.token` as token can be `False`.
+            token = self.token
+        return build_hf_headers(
+            token=token,
+            library_name=library_name or self.library_name,
+            library_version=library_version or self.library_version,
+            user_agent=user_agent or self.user_agent,
+            headers=self.headers,
+        )
+
+    def _prepare_folder_deletions(
+        self,
+        repo_id: str,
+        repo_type: Optional[str],
+        revision: Optional[str],
+        path_in_repo: str,
+        delete_patterns: Optional[Union[List[str], str]],
+        token: Union[bool, str, None] = None,
+    ) -> List[CommitOperationDelete]:
+        """Generate the list of Delete operations for a commit to delete files from a repo.
+
+        List remote files and match them against the `delete_patterns` constraints.
Returns a list of [`CommitOperationDelete`] + with the matching items. + + Note: `.gitattributes` file is essential to make a repo work properly on the Hub. This file will always be + kept even if it matches the `delete_patterns` constraints. + """ + if delete_patterns is None: + # If no delete patterns, no need to list and filter remote files + return [] + + # List remote files + filenames = self.list_repo_files(repo_id=repo_id, revision=revision, repo_type=repo_type, token=token) + + # Compute relative path in repo + if path_in_repo and path_in_repo not in (".", "./"): + path_in_repo = path_in_repo.strip("/") + "/" # harmonize + relpath_to_abspath = { + file[len(path_in_repo) :]: file for file in filenames if file.startswith(path_in_repo) + } + else: + relpath_to_abspath = {file: file for file in filenames} + + # Apply filter on relative paths and return + return [ + CommitOperationDelete(path_in_repo=relpath_to_abspath[relpath], is_folder=False) + for relpath in filter_repo_objects(relpath_to_abspath.keys(), allow_patterns=delete_patterns) + if relpath_to_abspath[relpath] != ".gitattributes" + ] + + def _prepare_upload_folder_additions( + self, + folder_path: Union[str, Path], + path_in_repo: str, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + repo_type: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> List[CommitOperationAdd]: + """Generate the list of Add operations for a commit to upload a folder. + + Files not matching the `allow_patterns` (allowlist) and `ignore_patterns` (denylist) + constraints are discarded. + """ + + folder_path = Path(folder_path).expanduser().resolve() + if not folder_path.is_dir(): + raise ValueError(f"Provided path: '{folder_path}' is not a directory") + + # List files from folder + relpath_to_abspath = { + path.relative_to(folder_path).as_posix(): path + for path in sorted(folder_path.glob("**/*")) # sorted to be deterministic + if path.is_file() + } + + # Filter files + # Patterns are applied on the path relative to `folder_path`. `path_in_repo` is prefixed after the filtering. + filtered_repo_objects = list( + filter_repo_objects( + relpath_to_abspath.keys(), allow_patterns=allow_patterns, ignore_patterns=ignore_patterns + ) + ) + + prefix = f"{path_in_repo.strip('/')}/" if path_in_repo else "" + + # If updating a README.md file, make sure the metadata format is valid + # It's better to fail early than to fail after all the files have been hashed. + if "README.md" in filtered_repo_objects: + self._validate_yaml( + content=relpath_to_abspath["README.md"].read_text(encoding="utf8"), + repo_type=repo_type, + token=token, + ) + if len(filtered_repo_objects) > 30: + log = logger.warning if len(filtered_repo_objects) > 200 else logger.info + log( + "It seems you are trying to upload a large folder at once. This might take some time and then fail if " + "the folder is too large. For such cases, it is recommended to upload in smaller batches or to use " + "`HfApi().upload_large_folder(...)`/`hf upload-large-folder` instead. For more details, " + "check out https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#upload-a-large-folder." 
+            )
+
+        logger.info(f"Start hashing {len(filtered_repo_objects)} files.")
+        operations = [
+            CommitOperationAdd(
+                path_or_fileobj=relpath_to_abspath[relpath],  # absolute path on disk
+                path_in_repo=prefix + relpath,  # "absolute" path in repo
+            )
+            for relpath in filtered_repo_objects
+        ]
+        logger.info(f"Finished hashing {len(filtered_repo_objects)} files.")
+        return operations
+
+    def _validate_yaml(self, content: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None):
+        """
+        Validate YAML from `README.md`, used before file hashing and upload.
+
+        Args:
+            content (`str`):
+                Content of `README.md` to validate.
+            repo_type (`str`, *optional*):
+                The type of the repo whose `README.md` metadata is validated. Must be one of `model`, `dataset` or `space`.
+                Defaults to `model`.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Raises:
+            [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError):
+                if the YAML metadata is invalid.
+        """
+        repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL
+        headers = self._build_hf_headers(token=token)
+
+        response = get_session().post(
+            f"{self.endpoint}/api/validate-yaml",
+            json={"content": content, "repoType": repo_type},
+            headers=headers,
+        )
+        # Handle warnings (example: empty metadata)
+        response_content = response.json()
+        message = "\n".join([f"- {warning.get('message')}" for warning in response_content.get("warnings", [])])
+        if message:
+            warnings.warn(f"Warnings while validating metadata in README.md:\n{message}")
+
+        # Raise on errors
+        try:
+            hf_raise_for_status(response)
+        except BadRequestError as e:
+            errors = response_content.get("errors", [])
+            message = "\n".join([f"- {error.get('message')}" for error in errors])
+            raise ValueError(f"Invalid metadata in README.md.\n{message}") from e
+
+    def get_user_overview(self, username: str, token: Union[bool, str, None] = None) -> User:
+        """
+        Get an overview of a user on the Hub.
+
+        Args:
+            username (`str`):
+                Username of the user to get an overview of.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `User`: A [`User`] object with the user's overview.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 If the user does not exist on the Hub.
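+
+        Example (illustrative sketch; the attributes available depend on the [`User`] fields):
+
+        ```py
+        >>> from huggingface_hub import get_user_overview
+        >>> user = get_user_overview("julien-c")
+        >>> user.username
+        'julien-c'
+        ```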
+        """
+        r = get_session().get(
+            f"{constants.ENDPOINT}/api/users/{username}/overview", headers=self._build_hf_headers(token=token)
+        )
+        hf_raise_for_status(r)
+        return User(**r.json())
+
+    @validate_hf_hub_args
+    def get_organization_overview(self, organization: str, token: Union[bool, str, None] = None) -> Organization:
+        """
+        Get an overview of an organization on the Hub.
+
+        Args:
+            organization (`str`):
+                Name of the organization to get an overview of.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved token, which is the recommended method
+                for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Organization`: An [`Organization`] object with the organization's overview.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 If the organization does not exist on the Hub.
+        """
+        r = get_session().get(
+            f"{constants.ENDPOINT}/api/organizations/{organization}/overview",
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(r)
+        return Organization(**r.json())
+
+    def list_organization_members(self, organization: str, token: Union[bool, str, None] = None) -> Iterable[User]:
+        """
+        List the members of an organization on the Hub.
+
+        Args:
+            organization (`str`):
+                Name of the organization to get the members of.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[User]`: An iterable of [`User`] objects with the members of the organization.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 If the organization does not exist on the Hub.
+
+        """
+        for member in paginate(
+            path=f"{constants.ENDPOINT}/api/organizations/{organization}/members",
+            params={},
+            headers=self._build_hf_headers(token=token),
+        ):
+            yield User(**member)
+
+    def list_user_followers(self, username: str, token: Union[bool, str, None] = None) -> Iterable[User]:
+        """
+        Get the list of followers of a user on the Hub.
+
+        Args:
+            username (`str`):
+                Username of the user to get the followers of.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[User]`: An iterable of [`User`] objects with the followers of the user.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 If the user does not exist on the Hub.
+
+        """
+        for follower in paginate(
+            path=f"{constants.ENDPOINT}/api/users/{username}/followers",
+            params={},
+            headers=self._build_hf_headers(token=token),
+        ):
+            yield User(**follower)
+
+    def list_user_following(self, username: str, token: Union[bool, str, None] = None) -> Iterable[User]:
+        """
+        Get the list of users followed by a user on the Hub.
+
+        Args:
+            username (`str`):
+                Username of the user whose followed users should be listed.
+            token (`bool` or `str`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[User]`: An iterable of [`User`] objects with the users followed by the user.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 If the user does not exist on the Hub.
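+
+        Example (illustrative sketch):
+
+        ```py
+        >>> from huggingface_hub import list_user_following
+        >>> for user in list_user_following("julien-c"):
+        ...     print(user.username)
+        ```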
+
+        """
+        for followed_user in paginate(
+            path=f"{constants.ENDPOINT}/api/users/{username}/following",
+            params={},
+            headers=self._build_hf_headers(token=token),
+        ):
+            yield User(**followed_user)
+
+    def list_papers(
+        self,
+        *,
+        query: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> Iterable[PaperInfo]:
+        """
+        List daily papers on the Hugging Face Hub given a search query.
+
+        Args:
+            query (`str`, *optional*):
+                A search query string to find papers.
+                If provided, returns papers that match the query.
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token (string). Defaults to the locally saved
+                token, which is the recommended method for authentication (see
+                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+                To disable authentication, pass `False`.
+
+        Returns:
+            `Iterable[PaperInfo]`: an iterable of [`huggingface_hub.hf_api.PaperInfo`] objects.
+
+        Example:
+
+        ```python
+        >>> from huggingface_hub import HfApi
+
+        >>> api = HfApi()
+
+        # List all papers with "attention" in their title
+        >>> api.list_papers(query="attention")
+        ```
+        """
+        path = f"{self.endpoint}/api/papers/search"
+        params = {}
+        if query:
+            params["q"] = query
+        r = get_session().get(
+            path,
+            params=params,
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(r)
+        for paper in r.json():
+            yield PaperInfo(**paper)
+
+    def paper_info(self, id: str) -> PaperInfo:
+        """
+        Get information for a paper on the Hub.
+
+        Args:
+            id (`str`):
+                ArXiv id of the paper.
+
+        Returns:
+            `PaperInfo`: A `PaperInfo` object.
+
+        Raises:
+            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
+                HTTP 404 If the paper does not exist on the Hub.
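+
+        Example (illustrative sketch; `"2307.09288"` is the paper id used elsewhere in these docs):
+
+        ```py
+        >>> from huggingface_hub import paper_info
+        >>> paper = paper_info("2307.09288")
+        >>> paper.title
+        'Llama 2: Open Foundation and Fine-Tuned Chat Models'
+        ```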
+        """
+        path = f"{self.endpoint}/api/papers/{id}"
+        r = get_session().get(path)
+        hf_raise_for_status(r)
+        return PaperInfo(**r.json())
+
+    def auth_check(
+        self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
+    ) -> None:
+        """
+        Check if the provided user token has access to a specific repository on the Hugging Face Hub.
+
+        This method verifies whether the user, authenticated via the provided token, has access to the specified
+        repository. If the repository is not found or if the user lacks the required permissions to access it,
+        the method raises an appropriate exception.
+
+        Args:
+            repo_id (`str`):
+                The repository to check for access. Format should be `"user/repo_name"`.
+                Example: `"user/my-cool-model"`.
+
+            repo_type (`str`, *optional*):
+                The type of the repository. Should be one of `"model"`, `"dataset"`, or `"space"`.
+                If not specified, the default is `"model"`.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+
+        Raises:
+            [`~utils.RepositoryNotFoundError`]:
+                Raised if the repository does not exist, is private, or the user does not have access. This can
+                occur if the `repo_id` or `repo_type` is incorrect or if the repository is private but the user
+                is not authenticated.
+
+            [`~utils.GatedRepoError`]:
+                Raised if the repository exists but is gated and the user is not authorized to access it.
+
+        Example:
+            Check if the user has access to a repository:
+
+            ```python
+            >>> from huggingface_hub import auth_check
+            >>> from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError
+
+            try:
+                auth_check("user/my-cool-model")
+            except GatedRepoError:
+                # Handle gated repository error
+                print("You do not have permission to access this gated repository.")
+            except RepositoryNotFoundError:
+                # Handle repository not found error
+                print("The repository was not found or you do not have access.")
+            ```
+
+            In this example:
+            - If the user has access, the method completes successfully.
+            - If the repository is gated or does not exist, appropriate exceptions are raised, allowing the user
+              to handle them accordingly.
+        """
+        headers = self._build_hf_headers(token=token)
+        if repo_type is None:
+            repo_type = constants.REPO_TYPE_MODEL
+        if repo_type not in constants.REPO_TYPES:
+            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
+        path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/auth-check"
+        r = get_session().get(path, headers=headers)
+        hf_raise_for_status(r)
+
+    def run_job(
+        self,
+        *,
+        image: str,
+        command: List[str],
+        env: Optional[Dict[str, Any]] = None,
+        secrets: Optional[Dict[str, Any]] = None,
+        flavor: Optional[SpaceHardware] = None,
+        timeout: Optional[Union[int, float, str]] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> JobInfo:
+        """
+        Run compute Jobs on Hugging Face infrastructure.
+
+        Args:
+            image (`str`):
+                The Docker image to use.
+                Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
+                Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
+
+            command (`List[str]`):
+                The command to run. Example: `["echo", "hello"]`.
+
+            env (`Dict[str, Any]`, *optional*):
+                Defines the environment variables for the Job.
+
+            secrets (`Dict[str, Any]`, *optional*):
+                Defines the secret environment variables for the Job.
+
+            flavor (`str`, *optional*):
+                Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
+                Defaults to `"cpu-basic"`.
+
+            timeout (`Union[int, float, str]`, *optional*):
+                Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
+                Example: `300` or `"5m"` for 5 minutes.
+
+            namespace (`str`, *optional*):
+                The namespace where the Job will be created. Defaults to the current user's namespace.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
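+
+        Returns:
+            [`JobInfo`]:
+                Info about the newly started Job (its `id`, `status`, docker image, command, ...),
+                as also returned by [`inspect_job`].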
+
+        Example:
+            Run your first Job:
+
+            ```python
+            >>> from huggingface_hub import run_job
+            >>> run_job(image="python:3.12", command=["python", "-c", "print('Hello from HF compute!')"])
+            ```
+
+            Run a GPU Job:
+
+            ```python
+            >>> from huggingface_hub import run_job
+            >>> image = "pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"
+            >>> command = ["python", "-c", "import torch; print(f'This code ran with the following GPU: {torch.cuda.get_device_name()}')"]
+            >>> run_job(image=image, command=command, flavor="a10g-small")
+            ```
+
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        job_spec = _create_job_spec(
+            image=image,
+            command=command,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+        )
+        response = get_session().post(
+            f"https://huggingface.co/api/jobs/{namespace}",
+            json=job_spec,
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        job_info = response.json()
+        return JobInfo(**job_info, endpoint=self.endpoint)
+
+    def fetch_job_logs(
+        self,
+        *,
+        job_id: str,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> Iterable[str]:
+        """
+        Fetch all the logs from a compute Job on Hugging Face infrastructure.
+
+        Args:
+            job_id (`str`):
+                ID of the Job.
+
+            namespace (`str`, *optional*):
+                The namespace where the Job is running. Defaults to the current user's namespace.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
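+
+        Returns:
+            `Iterable[str]`: the logs of the Job, yielded line by line.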
+
+        Example:
+
+        ```python
+        >>> from huggingface_hub import fetch_job_logs, run_job
+        >>> job = run_job(image="python:3.12", command=["python", "-c", "print('Hello from HF compute!')"])
+        >>> for log in fetch_job_logs(job_id=job.id):
+        ...     print(log)
+        Hello from HF compute!
+        ```
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+        logging_finished = logging_started = False
+        job_finished = False
+        # - We need to retry because sometimes the /logs doesn't return logs when the job just started.
+        #   (for example it can return only two lines: one for "Job started" and one empty line)
+        # - Timeouts can happen in case of build errors
+        # - ChunkedEncodingError can happen in case of stopped logging in the middle of streaming
+        # - Infinite empty log stream can happen in case of build error
+        #   (the logs stream is infinite and empty except for the Job started message)
+        # - there is a ": keep-alive" every 30 seconds
+
+        # We don't use http_backoff since we need to check ourselves if ConnectionError.__context__ is a TimeoutError
+        max_retries = 5
+        min_wait_time = 1
+        max_wait_time = 10
+        sleep_time = 0
+        for _ in range(max_retries):
+            time.sleep(sleep_time)
+            sleep_time = min(max_wait_time, max(min_wait_time, sleep_time * 2))
+            try:
+                resp = get_session().get(
+                    f"https://huggingface.co/api/jobs/{namespace}/{job_id}/logs",
+                    headers=self._build_hf_headers(token=token),
+                    stream=True,
+                    timeout=120,
+                )
+                log = None
+                for line in resp.iter_lines(chunk_size=1):
+                    line = line.decode("utf-8")
+                    if line and line.startswith("data: {"):
+                        data = json.loads(line[len("data: ") :])
+                        # timestamp = data["timestamp"]
+                        if not data["data"].startswith("===== Job started"):
+                            logging_started = True
+                            log = data["data"]
+                            yield log
+                logging_finished = logging_started
+            except requests.exceptions.ChunkedEncodingError:
+                # Response ended prematurely
+                break
+            except KeyboardInterrupt:
+                break
+            except requests.exceptions.ConnectionError as err:
+                is_timeout = err.__context__ and isinstance(getattr(err.__context__, "__cause__", None), TimeoutError)
+                if logging_started or not is_timeout:
+                    raise
+            if logging_finished or job_finished:
+                break
+            job_status = (
+                get_session()
+                .get(
+                    f"https://huggingface.co/api/jobs/{namespace}/{job_id}",
+                    headers=self._build_hf_headers(token=token),
+                )
+                .json()
+            )
+            if "status" in job_status and job_status["status"]["stage"] not in ("RUNNING", "UPDATING"):
+                job_finished = True
+
+    def list_jobs(
+        self,
+        *,
+        timeout: Optional[int] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> List[JobInfo]:
+        """
+        List compute Jobs on Hugging Face infrastructure.
+
+        Args:
+            timeout (`int`, *optional*):
+                Timeout in seconds for the request to the Hub.
+
+            namespace (`str`, *optional*):
+                The namespace from where it lists the jobs. Defaults to the current user's namespace.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
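+
+        Returns:
+            `List[JobInfo]`: a list of [`JobInfo`] objects, one per Job in the namespace.
+
+        Example (illustrative sketch; the number of Jobs and their stages depend on your namespace):
+
+        ```python
+        >>> from huggingface_hub import list_jobs
+        >>> jobs = list_jobs()
+        >>> len(jobs)
+        2
+        >>> jobs[0].status.stage
+        'RUNNING'
+        ```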
If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. + + Example: + + ```python + >>> from huggingface_hub import inspect_job, run_job + >>> job = run_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"]) + >>> inspect_job(job.id) + JobInfo( + id='68780d00bbe36d38803f645f', + created_at=datetime.datetime(2025, 7, 16, 20, 35, 12, 808000, tzinfo=datetime.timezone.utc), + docker_image='python:3.12', + space_id=None, + command=['python', '-c', "print('Hello from HF compute!')"], + arguments=[], + environment={}, + secrets={}, + flavor='cpu-basic', + status=JobStatus(stage='RUNNING', message=None) + ) + ``` + """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + response = get_session().get( + f"{self.endpoint}/api/jobs/{namespace}/{job_id}", + headers=self._build_hf_headers(token=token), + ) + response.raise_for_status() + return JobInfo(**response.json(), endpoint=self.endpoint) + + def cancel_job( + self, + *, + job_id: str, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> None: + """ + Cancel a compute Job on Hugging Face infrastructure. + + Args: + job_id (`str`): + ID of the Job. + + namespace (`str`, *optional*): + The namespace where the Job is running. Defaults to the current user's namespace. + + token `(Union[bool, str, None]`, *optional*): + A valid user access token. If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. + """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + get_session().post( + f"{self.endpoint}/api/jobs/{namespace}/{job_id}/cancel", + headers=self._build_hf_headers(token=token), + ).raise_for_status() + + @experimental + def run_uv_job( + self, + script: str, + *, + script_args: Optional[List[str]] = None, + dependencies: Optional[List[str]] = None, + python: Optional[str] = None, + image: Optional[str] = None, + env: Optional[Dict[str, Any]] = None, + secrets: Optional[Dict[str, Any]] = None, + flavor: Optional[SpaceHardware] = None, + timeout: Optional[Union[int, float, str]] = None, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + _repo: Optional[str] = None, + ) -> JobInfo: + """ + Run a UV script Job on Hugging Face infrastructure. + + Args: + script (`str`): + Path or URL of the UV script, or a command. + + script_args (`List[str]`, *optional*) + Arguments to pass to the script or command. + + dependencies (`List[str]`, *optional*) + Dependencies to use to run the UV script. + + python (`str`, *optional*) + Use a specific Python version. Default is 3.12. + + image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm"): + Use a custom Docker image with `uv` installed. + + env (`Dict[str, Any]`, *optional*): + Defines the environment variables for the Job. + + secrets (`Dict[str, Any]`, *optional*): + Defines the secret environment variables for the Job. + + flavor (`str`, *optional*): + Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values. + Defaults to `"cpu-basic"`. 
+
+            timeout (`Union[int, float, str]`, *optional*):
+                Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
+                Example: `300` or `"5m"` for 5 minutes.
+
+            namespace (`str`, *optional*):
+                The namespace where the Job will be created. Defaults to the current user's namespace.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+
+        Example:
+
+            Run a script from a URL:
+
+            ```python
+            >>> from huggingface_hub import run_uv_job
+            >>> script = "https://raw.githubusercontent.com/huggingface/trl/refs/heads/main/trl/scripts/sft.py"
+            >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"]
+            >>> run_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small")
+            ```
+
+            Run a local script:
+
+            ```python
+            >>> from huggingface_hub import run_uv_job
+            >>> script = "my_sft.py"
+            >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"]
+            >>> run_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small")
+            ```
+
+            Run a command:
+
+            ```python
+            >>> from huggingface_hub import run_uv_job
+            >>> script = "lighteval"
+            >>> script_args = ["endpoint", "inference-providers", "model_name=openai/gpt-oss-20b,provider=auto", "lighteval|gsm8k|0|0"]
+            >>> run_uv_job(script, script_args=script_args, dependencies=["lighteval"], flavor="a10g-small")
+            ```
+        """
+        image = image or "ghcr.io/astral-sh/uv:python3.12-bookworm"
+        env = env or {}
+        secrets = secrets or {}
+
+        # Build command
+        command, env, secrets = self._create_uv_command_env_and_secrets(
+            script=script,
+            script_args=script_args,
+            dependencies=dependencies,
+            python=python,
+            env=env,
+            secrets=secrets,
+            namespace=namespace,
+            token=token,
+            _repo=_repo,
+        )
+        # Create RunCommand args
+        return self.run_job(
+            image=image,
+            command=command,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+            namespace=namespace,
+            token=token,
+        )
+
+    def create_scheduled_job(
+        self,
+        *,
+        image: str,
+        command: List[str],
+        schedule: str,
+        suspend: Optional[bool] = None,
+        concurrency: Optional[bool] = None,
+        env: Optional[Dict[str, Any]] = None,
+        secrets: Optional[Dict[str, Any]] = None,
+        flavor: Optional[SpaceHardware] = None,
+        timeout: Optional[Union[int, float, str]] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> ScheduledJobInfo:
+        """
+        Create a scheduled compute Job on Hugging Face infrastructure.
+
+        Args:
+            image (`str`):
+                The Docker image to use.
+                Examples: `"ubuntu"`, `"python:3.12"`, `"pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"`.
+                Example with an image from a Space: `"hf.co/spaces/lhoestq/duckdb"`.
+
+            command (`List[str]`):
+                The command to run. Example: `["echo", "hello"]`.
+
+            schedule (`str`):
+                One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a
+                CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday).
+
+            suspend (`bool`, *optional*):
+                If True, the scheduled Job is suspended (paused). Defaults to False.
+
+            concurrency (`bool`, *optional*):
+                If True, multiple instances of this Job can run concurrently. Defaults to False.
+
+            env (`Dict[str, Any]`, *optional*):
+                Defines the environment variables for the Job.
+
+            secrets (`Dict[str, Any]`, *optional*):
+                Defines the secret environment variables for the Job.
+
+            flavor (`str`, *optional*):
+                Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values.
+                Defaults to `"cpu-basic"`.
+
+            timeout (`Union[int, float, str]`, *optional*):
+                Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days).
+                Example: `300` or `"5m"` for 5 minutes.
+
+            namespace (`str`, *optional*):
+                The namespace where the Job will be created. Defaults to the current user's namespace.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
+
+        Example:
+            Create your first scheduled Job:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_job
+            >>> create_scheduled_job(image="python:3.12", command=["python", "-c", "print('Hello from HF compute!')"], schedule="@hourly")
+            ```
+
+            Use a CRON schedule expression:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_job
+            >>> create_scheduled_job(image="python:3.12", command=["python", "-c", "print('this runs every 5min')"], schedule="*/5 * * * *")
+            ```
+
+            Create a scheduled GPU Job:
+
+            ```python
+            >>> from huggingface_hub import create_scheduled_job
+            >>> image = "pytorch/pytorch:2.6.0-cuda12.4-cudnn9-devel"
+            >>> command = ["python", "-c", "import torch; print(f'This code ran with the following GPU: {torch.cuda.get_device_name()}')"]
+            >>> create_scheduled_job(image=image, command=command, flavor="a10g-small", schedule="@hourly")
+            ```
+
+        """
+        if namespace is None:
+            namespace = self.whoami(token=token)["name"]
+
+        # prepare payload to send to HF Jobs API
+        job_spec = _create_job_spec(
+            image=image,
+            command=command,
+            env=env,
+            secrets=secrets,
+            flavor=flavor,
+            timeout=timeout,
+        )
+        input_json: Dict[str, Any] = {
+            "jobSpec": job_spec,
+            "schedule": schedule,
+        }
+        if concurrency is not None:
+            input_json["concurrency"] = concurrency
+        if suspend is not None:
+            input_json["suspend"] = suspend
+        response = get_session().post(
+            f"https://huggingface.co/api/scheduled-jobs/{namespace}",
+            json=input_json,
+            headers=self._build_hf_headers(token=token),
+        )
+        hf_raise_for_status(response)
+        scheduled_job_info = response.json()
+        return ScheduledJobInfo(**scheduled_job_info)
+
+    def list_scheduled_jobs(
+        self,
+        *,
+        timeout: Optional[int] = None,
+        namespace: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+    ) -> List[ScheduledJobInfo]:
+        """
+        List scheduled compute Jobs on Hugging Face infrastructure.
+
+        Args:
+            timeout (`int`, *optional*):
+                Timeout (in seconds) for the request to the Hub.
+
+            namespace (`str`, *optional*):
+                The namespace from which the scheduled Jobs are listed. Defaults to the current user's namespace.
+
+            token (`Union[bool, str, None]`, *optional*):
+                A valid user access token. If not provided, the locally saved token will be used, which is the
+                recommended authentication method. Set to `False` to disable authentication.
+                Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication.
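+
+        Example (a minimal sketch; the printed ID is illustrative):
+
+            ```python
+            >>> from huggingface_hub import list_scheduled_jobs
+            >>> for scheduled_job in list_scheduled_jobs():
+            ...     print(scheduled_job.id)
+            68780d00bbe36d38803f645f
+            ```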
+ """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + response = get_session().get( + f"{self.endpoint}/api/scheduled-jobs/{namespace}", + headers=self._build_hf_headers(token=token), + timeout=timeout, + ) + hf_raise_for_status(response) + return [ScheduledJobInfo(**scheduled_job_info) for scheduled_job_info in response.json()] + + def inspect_scheduled_job( + self, + *, + scheduled_job_id: str, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> ScheduledJobInfo: + """ + Inspect a scheduled compute Job on Hugging Face infrastructure. + + Args: + scheduled_job_id (`str`): + ID of the scheduled Job. + + namespace (`str`, *optional*): + The namespace where the scheduled Job is. Defaults to the current user's namespace. + + token `(Union[bool, str, None]`, *optional*): + A valid user access token. If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. + + Example: + + ```python + >>> from huggingface_hub import inspect_job, create_scheduled_job + >>> scheduled_job = create_scheduled_job(image="python:3.12", command=["python", "-c" ,"print('Hello from HF compute!')"], schedule="@hourly") + >>> inspect_scheduled_job(scheduled_job.id) + ``` + """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + response = get_session().get( + f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + return ScheduledJobInfo(**response.json()) + + def delete_scheduled_job( + self, + *, + scheduled_job_id: str, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> None: + """ + Delete a scheduled compute Job on Hugging Face infrastructure. + + Args: + scheduled_job_id (`str`): + ID of the scheduled Job. + + namespace (`str`, *optional*): + The namespace where the scheduled Job is. Defaults to the current user's namespace. + + token `(Union[bool, str, None]`, *optional*): + A valid user access token. If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. + """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + response = get_session().delete( + f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}", + headers=self._build_hf_headers(token=token), + ) + hf_raise_for_status(response) + + def suspend_scheduled_job( + self, + *, + scheduled_job_id: str, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> None: + """ + Suspend (pause) a scheduled compute Job on Hugging Face infrastructure. + + Args: + scheduled_job_id (`str`): + ID of the scheduled Job. + + namespace (`str`, *optional*): + The namespace where the scheduled Job is. Defaults to the current user's namespace. + + token `(Union[bool, str, None]`, *optional*): + A valid user access token. If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. 
+ """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + get_session().post( + f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}/suspend", + headers=self._build_hf_headers(token=token), + ).raise_for_status() + + def resume_scheduled_job( + self, + *, + scheduled_job_id: str, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + ) -> None: + """ + Resume (unpause) a scheduled compute Job on Hugging Face infrastructure. + + Args: + scheduled_job_id (`str`): + ID of the scheduled Job. + + namespace (`str`, *optional*): + The namespace where the scheduled Job is. Defaults to the current user's namespace. + + token `(Union[bool, str, None]`, *optional*): + A valid user access token. If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. + """ + if namespace is None: + namespace = self.whoami(token=token)["name"] + get_session().post( + f"{self.endpoint}/api/scheduled-jobs/{namespace}/{scheduled_job_id}/resume", + headers=self._build_hf_headers(token=token), + ).raise_for_status() + + @experimental + def create_scheduled_uv_job( + self, + script: str, + *, + script_args: Optional[List[str]] = None, + schedule: str, + suspend: Optional[bool] = None, + concurrency: Optional[bool] = None, + dependencies: Optional[List[str]] = None, + python: Optional[str] = None, + image: Optional[str] = None, + env: Optional[Dict[str, Any]] = None, + secrets: Optional[Dict[str, Any]] = None, + flavor: Optional[SpaceHardware] = None, + timeout: Optional[Union[int, float, str]] = None, + namespace: Optional[str] = None, + token: Union[bool, str, None] = None, + _repo: Optional[str] = None, + ) -> ScheduledJobInfo: + """ + Run a UV script Job on Hugging Face infrastructure. + + Args: + script (`str`): + Path or URL of the UV script, or a command. + + script_args (`List[str]`, *optional*) + Arguments to pass to the script, or a command. + + schedule (`str`): + One of "@annually", "@yearly", "@monthly", "@weekly", "@daily", "@hourly", or a + CRON schedule expression (e.g., '0 9 * * 1' for 9 AM every Monday). + + suspend (`bool`, *optional*): + If True, the scheduled Job is suspended (paused). Defaults to False. + + concurrency (`bool`, *optional*): + If True, multiple instances of this Job can run concurrently. Defaults to False. + + dependencies (`List[str]`, *optional*) + Dependencies to use to run the UV script. + + python (`str`, *optional*) + Use a specific Python version. Default is 3.12. + + image (`str`, *optional*, defaults to "ghcr.io/astral-sh/uv:python3.12-bookworm"): + Use a custom Docker image with `uv` installed. + + env (`Dict[str, Any]`, *optional*): + Defines the environment variables for the Job. + + secrets (`Dict[str, Any]`, *optional*): + Defines the secret environment variables for the Job. + + flavor (`str`, *optional*): + Flavor for the hardware, as in Hugging Face Spaces. See [`SpaceHardware`] for possible values. + Defaults to `"cpu-basic"`. + + timeout (`Union[int, float, str]`, *optional*): + Max duration for the Job: int/float with s (seconds, default), m (minutes), h (hours) or d (days). + Example: `300` or `"5m"` for 5 minutes. + + namespace (`str`, *optional*): + The namespace where the Job will be created. Defaults to the current user's namespace. + + token `(Union[bool, str, None]`, *optional*): + A valid user access token. 
If not provided, the locally saved token will be used, which is the + recommended authentication method. Set to `False` to disable authentication. + Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. + + Example: + + Schedule a script from a URL: + + ```python + >>> from huggingface_hub import create_scheduled_uv_job + >>> script = "https://raw.githubusercontent.com/huggingface/trl/refs/heads/main/trl/scripts/sft.py" + >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"] + >>> create_scheduled_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small", schedule="@weekly") + ``` + + Schedule a local script: + + ```python + >>> from huggingface_hub import create_scheduled_uv_job + >>> script = "my_sft.py" + >>> script_args = ["--model_name_or_path", "Qwen/Qwen2-0.5B", "--dataset_name", "trl-lib/Capybara", "--push_to_hub"] + >>> create_scheduled_uv_job(script, script_args=script_args, dependencies=["trl"], flavor="a10g-small", schedule="@weekly") + ``` + + Schedule a command: + + ```python + >>> from huggingface_hub import create_scheduled_uv_job + >>> script = "lighteval" + >>> script_args= ["endpoint", "inference-providers", "model_name=openai/gpt-oss-20b,provider=auto", "lighteval|gsm8k|0|0"] + >>> create_scheduled_uv_job(script, script_args=script_args, dependencies=["lighteval"], flavor="a10g-small", schedule="@weekly") + ``` + """ + image = image or "ghcr.io/astral-sh/uv:python3.12-bookworm" + # Build command + command, env, secrets = self._create_uv_command_env_and_secrets( + script=script, + script_args=script_args, + dependencies=dependencies, + python=python, + env=env, + secrets=secrets, + namespace=namespace, + token=token, + _repo=_repo, + ) + # Create RunCommand args + return self.create_scheduled_job( + image=image, + command=command, + schedule=schedule, + suspend=suspend, + concurrency=concurrency, + env=env, + secrets=secrets, + flavor=flavor, + timeout=timeout, + namespace=namespace, + token=token, + ) + + def _create_uv_command_env_and_secrets( + self, + *, + script: str, + script_args: Optional[List[str]], + dependencies: Optional[List[str]], + python: Optional[str], + env: Optional[Dict[str, Any]], + secrets: Optional[Dict[str, Any]], + namespace: Optional[str], + token: Union[bool, str, None], + _repo: Optional[str], + ) -> Tuple[List[str], Dict[str, Any], Dict[str, Any]]: + env = env or {} + secrets = secrets or {} + + # Build command + uv_args = [] + if dependencies: + for dependency in dependencies: + uv_args += ["--with", dependency] + if python: + uv_args += ["--python", python] + script_args = script_args or [] + + if namespace is None: + namespace = self.whoami(token=token)["name"] + + is_url = script.startswith("http://") or script.startswith("https://") + if is_url or not Path(script).is_file(): + # Direct URL execution or command - no upload needed + command = ["uv", "run"] + uv_args + [script] + script_args + else: + # Local file - upload to HF + script_path = Path(script) + filename = script_path.name + # Parse repo + if _repo: + repo_id = _repo + if "/" not in repo_id: + repo_id = f"{namespace}/{repo_id}" + else: + repo_id = f"{namespace}/hf-cli-jobs-uv-run-scripts" + + # Create repo if needed + try: + self.repo_info(repo_id, repo_type="dataset") + logger.debug(f"Using existing repository: {repo_id}") + except RepositoryNotFoundError: + logger.info(f"Creating repository: {repo_id}") + create_repo(repo_id, repo_type="dataset", private=True, 
exist_ok=True) + + # Upload script + logger.info(f"Uploading {script_path.name} to {repo_id}...") + with open(script_path, "r") as f: + script_content = f.read() + + commit_hash = self.upload_file( + path_or_fileobj=script_content.encode(), + path_in_repo=filename, + repo_id=repo_id, + repo_type="dataset", + ).oid + + script_url = f"{self.endpoint}/datasets/{repo_id}/resolve/{commit_hash}/{filename}" + repo_url = f"{self.endpoint}/datasets/{repo_id}" + + logger.debug(f"✓ Script uploaded to: {repo_url}/blob/main/{filename}") + + # Create and upload minimal README + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S UTC") + readme_content = dedent( + f""" + --- + tags: + - hf-cli-jobs-uv-script + - ephemeral + viewer: false + --- + + # UV Script: {filename} + + Executed via `hf jobs uv run` on {timestamp} + + ## Run this script + + ```bash + hf jobs uv run {filename} + ``` + + --- + *Created with [hf jobs](https://huggingface.co/docs/huggingface_hub/main/en/guides/jobs)* + """ + ) + self.upload_file( + path_or_fileobj=readme_content.encode(), + path_in_repo="README.md", + repo_id=repo_id, + repo_type="dataset", + ) + + secrets["UV_SCRIPT_HF_TOKEN"] = token or self.token or get_token() + env["UV_SCRIPT_URL"] = script_url + + pre_command = ( + dedent( + """ + import urllib.request + import os + from pathlib import Path + o = urllib.request.build_opener() + o.addheaders = [("Authorization", "Bearer " + os.environ["UV_SCRIPT_HF_TOKEN"])] + Path("/tmp/script.py").write_bytes(o.open(os.environ["UV_SCRIPT_URL"]).read()) + """ + ) + .strip() + .replace('"', r"\"") + .split("\n") + ) + pre_command = ["python", "-c", '"' + "; ".join(pre_command) + '"'] + command = ["uv", "run"] + uv_args + ["/tmp/script.py"] + script_args + command = ["bash", "-c", " ".join(pre_command) + " && " + " ".join(command)] + return command, env, secrets + + +def _parse_revision_from_pr_url(pr_url: str) -> str: + """Safely parse revision number from a PR url. 
+ + Example: + ```py + >>> _parse_revision_from_pr_url("https://huggingface.co/bigscience/bloom/discussions/2") + "refs/pr/2" + ``` + """ + re_match = re.match(_REGEX_DISCUSSION_URL, pr_url) + if re_match is None: + raise RuntimeError(f"Unexpected response from the hub, expected a Pull Request URL but got: '{pr_url}'") + return f"refs/pr/{re_match[1]}" + + +api = HfApi() + +whoami = api.whoami +auth_check = api.auth_check +get_token_permission = api.get_token_permission + +list_models = api.list_models +model_info = api.model_info + +list_datasets = api.list_datasets +dataset_info = api.dataset_info + +list_spaces = api.list_spaces +space_info = api.space_info + +list_papers = api.list_papers +paper_info = api.paper_info + +repo_exists = api.repo_exists +revision_exists = api.revision_exists +file_exists = api.file_exists +repo_info = api.repo_info +list_repo_files = api.list_repo_files +list_repo_refs = api.list_repo_refs +list_repo_commits = api.list_repo_commits +list_repo_tree = api.list_repo_tree +get_paths_info = api.get_paths_info + +get_model_tags = api.get_model_tags +get_dataset_tags = api.get_dataset_tags + +create_commit = api.create_commit +create_repo = api.create_repo +delete_repo = api.delete_repo +update_repo_visibility = api.update_repo_visibility +update_repo_settings = api.update_repo_settings +move_repo = api.move_repo +upload_file = api.upload_file +upload_folder = api.upload_folder +delete_file = api.delete_file +delete_folder = api.delete_folder +delete_files = api.delete_files +upload_large_folder = api.upload_large_folder +preupload_lfs_files = api.preupload_lfs_files +create_branch = api.create_branch +delete_branch = api.delete_branch +create_tag = api.create_tag +delete_tag = api.delete_tag +get_full_repo_name = api.get_full_repo_name + +# Danger-zone API +super_squash_history = api.super_squash_history +list_lfs_files = api.list_lfs_files +permanently_delete_lfs_files = api.permanently_delete_lfs_files + +# Safetensors helpers +get_safetensors_metadata = api.get_safetensors_metadata +parse_safetensors_file_metadata = api.parse_safetensors_file_metadata + +# Background jobs +run_as_future = api.run_as_future + +# Activity API +list_liked_repos = api.list_liked_repos +list_repo_likers = api.list_repo_likers +unlike = api.unlike + +# Community API +get_discussion_details = api.get_discussion_details +get_repo_discussions = api.get_repo_discussions +create_discussion = api.create_discussion +create_pull_request = api.create_pull_request +change_discussion_status = api.change_discussion_status +comment_discussion = api.comment_discussion +edit_discussion_comment = api.edit_discussion_comment +rename_discussion = api.rename_discussion +merge_pull_request = api.merge_pull_request + +# Space API +add_space_secret = api.add_space_secret +delete_space_secret = api.delete_space_secret +get_space_variables = api.get_space_variables +add_space_variable = api.add_space_variable +delete_space_variable = api.delete_space_variable +get_space_runtime = api.get_space_runtime +request_space_hardware = api.request_space_hardware +set_space_sleep_time = api.set_space_sleep_time +pause_space = api.pause_space +restart_space = api.restart_space +duplicate_space = api.duplicate_space +request_space_storage = api.request_space_storage +delete_space_storage = api.delete_space_storage + +# Inference Endpoint API +list_inference_endpoints = api.list_inference_endpoints +create_inference_endpoint = api.create_inference_endpoint +get_inference_endpoint = api.get_inference_endpoint 
+update_inference_endpoint = api.update_inference_endpoint
+delete_inference_endpoint = api.delete_inference_endpoint
+pause_inference_endpoint = api.pause_inference_endpoint
+resume_inference_endpoint = api.resume_inference_endpoint
+scale_to_zero_inference_endpoint = api.scale_to_zero_inference_endpoint
+create_inference_endpoint_from_catalog = api.create_inference_endpoint_from_catalog
+list_inference_catalog = api.list_inference_catalog
+
+# Collections API
+get_collection = api.get_collection
+list_collections = api.list_collections
+create_collection = api.create_collection
+update_collection_metadata = api.update_collection_metadata
+delete_collection = api.delete_collection
+add_collection_item = api.add_collection_item
+update_collection_item = api.update_collection_item
+delete_collection_item = api.delete_collection_item
+
+# Access requests API
+list_pending_access_requests = api.list_pending_access_requests
+list_accepted_access_requests = api.list_accepted_access_requests
+list_rejected_access_requests = api.list_rejected_access_requests
+cancel_access_request = api.cancel_access_request
+accept_access_request = api.accept_access_request
+reject_access_request = api.reject_access_request
+grant_access = api.grant_access
+
+# Webhooks API
+create_webhook = api.create_webhook
+disable_webhook = api.disable_webhook
+delete_webhook = api.delete_webhook
+enable_webhook = api.enable_webhook
+get_webhook = api.get_webhook
+list_webhooks = api.list_webhooks
+update_webhook = api.update_webhook
+
+
+# User API
+get_user_overview = api.get_user_overview
+get_organization_overview = api.get_organization_overview
+list_organization_members = api.list_organization_members
+list_user_followers = api.list_user_followers
+list_user_following = api.list_user_following
+
+# Jobs API
+run_job = api.run_job
+fetch_job_logs = api.fetch_job_logs
+list_jobs = api.list_jobs
+inspect_job = api.inspect_job
+cancel_job = api.cancel_job
+run_uv_job = api.run_uv_job
+create_scheduled_job = api.create_scheduled_job
+list_scheduled_jobs = api.list_scheduled_jobs
+inspect_scheduled_job = api.inspect_scheduled_job
+delete_scheduled_job = api.delete_scheduled_job
+suspend_scheduled_job = api.suspend_scheduled_job
+resume_scheduled_job = api.resume_scheduled_job
+create_scheduled_uv_job = api.create_scheduled_uv_job
diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/hf_file_system.py b/venv/lib/python3.13/site-packages/huggingface_hub/hf_file_system.py
new file mode 100644
index 0000000000000000000000000000000000000000..a29d38a92ee4ddc9348e2575769ca36de6ceab08
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/huggingface_hub/hf_file_system.py
@@ -0,0 +1,1150 @@
+import os
+import re
+import tempfile
+from collections import deque
+from dataclasses import dataclass, field
+from datetime import datetime
+from itertools import chain
+from pathlib import Path
+from typing import Any, Dict, Iterator, List, NoReturn, Optional, Tuple, Union
+from urllib.parse import quote, unquote
+
+import fsspec
+from fsspec.callbacks import _DEFAULT_CALLBACK, NoOpCallback, TqdmCallback
+from fsspec.utils import isfilelike
+from requests import Response
+
+from . import constants
+from ._commit_api import CommitOperationCopy, CommitOperationDelete
+from .errors import EntryNotFoundError, RepositoryNotFoundError, RevisionNotFoundError
+from .file_download import hf_hub_url, http_get
+from .hf_api import HfApi, LastCommitInfo, RepoFile
+from .utils import HFValidationError, hf_raise_for_status, http_backoff
+
+
+# Regex used to match special revisions with "/" in them (see #1710)
+SPECIAL_REFS_REVISION_REGEX = re.compile(
+    r"""
+    (^refs\/convert\/\w+)  # `refs/convert/parquet` revisions
+    |
+    (^refs\/pr\/\d+)  # PR revisions
+    """,
+    re.VERBOSE,
+)
+
+
+@dataclass
+class HfFileSystemResolvedPath:
+    """Data structure containing information about a resolved Hugging Face file system path."""
+
+    repo_type: str
+    repo_id: str
+    revision: str
+    path_in_repo: str
+    # The part placed after '@' in the initial path. It can be a quoted or unquoted refs revision.
+    # Used to reconstruct the unresolved path to return to the user.
+    _raw_revision: Optional[str] = field(default=None, repr=False)
+
+    def unresolve(self) -> str:
+        repo_path = constants.REPO_TYPES_URL_PREFIXES.get(self.repo_type, "") + self.repo_id
+        if self._raw_revision:
+            return f"{repo_path}@{self._raw_revision}/{self.path_in_repo}".rstrip("/")
+        elif self.revision != constants.DEFAULT_REVISION:
+            return f"{repo_path}@{safe_revision(self.revision)}/{self.path_in_repo}".rstrip("/")
+        else:
+            return f"{repo_path}/{self.path_in_repo}".rstrip("/")
+
+
+class HfFileSystem(fsspec.AbstractFileSystem):
+    """
+    Access a remote Hugging Face Hub repository as if it were a local file system.
+
+    > [!WARNING]
+    > [`HfFileSystem`] provides fsspec compatibility, which is useful for libraries that require it (e.g., reading
+    > Hugging Face datasets directly with `pandas`). However, it introduces additional overhead due to this compatibility
+    > layer. For better performance and reliability, it's recommended to use `HfApi` methods when possible.
+
+    Args:
+        token (`str` or `bool`, *optional*):
+            A valid user access token (string). Defaults to the locally saved
+            token, which is the recommended method for authentication (see
+            https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
+            To disable authentication, pass `False`.
+        endpoint (`str`, *optional*):
+            Endpoint of the Hub. Defaults to <https://huggingface.co>.
+
+    Usage:
+
+    ```python
+    >>> from huggingface_hub import HfFileSystem
+
+    >>> fs = HfFileSystem()
+
+    >>> # List files
+    >>> fs.glob("my-username/my-model/*.bin")
+    ['my-username/my-model/pytorch_model.bin']
+    >>> fs.ls("datasets/my-username/my-dataset", detail=False)
+    ['datasets/my-username/my-dataset/.gitattributes', 'datasets/my-username/my-dataset/README.md', 'datasets/my-username/my-dataset/data.json']
+
+    >>> # Read/write files
+    >>> with fs.open("my-username/my-model/pytorch_model.bin") as f:
+    ...     data = f.read()
+    >>> with fs.open("my-username/my-model/pytorch_model.bin", "wb") as f:
+    ...     f.write(data)
+    ```
+    """
+
+    root_marker = ""
+    protocol = "hf"
+
+    def __init__(
+        self,
+        *args,
+        endpoint: Optional[str] = None,
+        token: Union[bool, str, None] = None,
+        block_size: Optional[int] = None,
+        **storage_options,
+    ):
+        super().__init__(*args, **storage_options)
+        self.endpoint = endpoint or constants.ENDPOINT
+        self.token = token
+        self._api = HfApi(endpoint=endpoint, token=token)
+        self.block_size = block_size
+        # Maps (repo_type, repo_id, revision) to a 2-tuple with:
+        #  * the 1st element indicating whether the repository and the revision exist
+        #  * the 2nd element being the exception raised if the repository or revision doesn't exist
+        self._repo_and_revision_exists_cache: Dict[
+            Tuple[str, str, Optional[str]], Tuple[bool, Optional[Exception]]
+        ] = {}
+        # Maps parent directory path to path infos
+        self.dircache: Dict[str, List[Dict[str, Any]]] = {}
+
+    def _repo_and_revision_exist(
+        self, repo_type: str, repo_id: str, revision: Optional[str]
+    ) -> Tuple[bool, Optional[Exception]]:
+        if (repo_type, repo_id, revision) not in self._repo_and_revision_exists_cache:
+            try:
+                self._api.repo_info(
+                    repo_id, revision=revision, repo_type=repo_type, timeout=constants.HF_HUB_ETAG_TIMEOUT
+                )
+            except (RepositoryNotFoundError, HFValidationError) as e:
+                self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)] = False, e
+                self._repo_and_revision_exists_cache[(repo_type, repo_id, None)] = False, e
+            except RevisionNotFoundError as e:
+                self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)] = False, e
+                self._repo_and_revision_exists_cache[(repo_type, repo_id, None)] = True, None
+            else:
+                self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)] = True, None
+                self._repo_and_revision_exists_cache[(repo_type, repo_id, None)] = True, None
+        return self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)]
+
+    def resolve_path(self, path: str, revision: Optional[str] = None) -> HfFileSystemResolvedPath:
+        """
+        Resolve a Hugging Face file system path into its components.
+
+        Args:
+            path (`str`):
+                Path to resolve.
+            revision (`str`, *optional*):
+                The revision of the repo to resolve. Defaults to the revision specified in the path.
+
+        Returns:
+            [`HfFileSystemResolvedPath`]: Resolved path information containing `repo_type`, `repo_id`, `revision` and `path_in_repo`.
+
+        Raises:
+            `ValueError`:
+                If path contains conflicting revision information.
+            `NotImplementedError`:
+                If trying to list repositories.
+        """
+
+        def _align_revision_in_path_with_revision(
+            revision_in_path: Optional[str], revision: Optional[str]
+        ) -> Optional[str]:
+            if revision is not None:
+                if revision_in_path is not None and revision_in_path != revision:
+                    raise ValueError(
+                        f'Revision specified in path ("{revision_in_path}") and in `revision` argument ("{revision}")'
+                        " are not the same."
+ ) + else: + revision = revision_in_path + return revision + + path = self._strip_protocol(path) + if not path: + # can't list repositories at root + raise NotImplementedError("Access to repositories lists is not implemented.") + elif path.split("/")[0] + "/" in constants.REPO_TYPES_URL_PREFIXES.values(): + if "/" not in path: + # can't list repositories at the repository type level + raise NotImplementedError("Access to repositories lists is not implemented.") + repo_type, path = path.split("/", 1) + repo_type = constants.REPO_TYPES_MAPPING[repo_type] + else: + repo_type = constants.REPO_TYPE_MODEL + if path.count("/") > 0: + if "@" in path: + repo_id, revision_in_path = path.split("@", 1) + if "/" in revision_in_path: + match = SPECIAL_REFS_REVISION_REGEX.search(revision_in_path) + if match is not None and revision in (None, match.group()): + # Handle `refs/convert/parquet` and PR revisions separately + path_in_repo = SPECIAL_REFS_REVISION_REGEX.sub("", revision_in_path).lstrip("/") + revision_in_path = match.group() + else: + revision_in_path, path_in_repo = revision_in_path.split("/", 1) + else: + path_in_repo = "" + revision = _align_revision_in_path_with_revision(unquote(revision_in_path), revision) + repo_and_revision_exist, err = self._repo_and_revision_exist(repo_type, repo_id, revision) + if not repo_and_revision_exist: + _raise_file_not_found(path, err) + else: + revision_in_path = None + repo_id_with_namespace = "/".join(path.split("/")[:2]) + path_in_repo_with_namespace = "/".join(path.split("/")[2:]) + repo_id_without_namespace = path.split("/")[0] + path_in_repo_without_namespace = "/".join(path.split("/")[1:]) + repo_id = repo_id_with_namespace + path_in_repo = path_in_repo_with_namespace + repo_and_revision_exist, err = self._repo_and_revision_exist(repo_type, repo_id, revision) + if not repo_and_revision_exist: + if isinstance(err, (RepositoryNotFoundError, HFValidationError)): + repo_id = repo_id_without_namespace + path_in_repo = path_in_repo_without_namespace + repo_and_revision_exist, _ = self._repo_and_revision_exist(repo_type, repo_id, revision) + if not repo_and_revision_exist: + _raise_file_not_found(path, err) + else: + _raise_file_not_found(path, err) + else: + repo_id = path + path_in_repo = "" + if "@" in path: + repo_id, revision_in_path = path.split("@", 1) + revision = _align_revision_in_path_with_revision(unquote(revision_in_path), revision) + else: + revision_in_path = None + repo_and_revision_exist, _ = self._repo_and_revision_exist(repo_type, repo_id, revision) + if not repo_and_revision_exist: + raise NotImplementedError("Access to repositories lists is not implemented.") + + revision = revision if revision is not None else constants.DEFAULT_REVISION + return HfFileSystemResolvedPath(repo_type, repo_id, revision, path_in_repo, _raw_revision=revision_in_path) + + def invalidate_cache(self, path: Optional[str] = None) -> None: + """ + Clear the cache for a given path. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.invalidate_cache). + + Args: + path (`str`, *optional*): + Path to clear from cache. If not provided, clear the entire cache. 
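+
+        Example (a minimal sketch; the repo path is a placeholder in the spirit of the class docstring):
+
+            ```python
+            >>> from huggingface_hub import HfFileSystem
+            >>> fs = HfFileSystem()
+            >>> fs.ls("my-username/my-model", detail=False)  # populate the cache
+            >>> fs.invalidate_cache("my-username/my-model")  # drop the cached entries for this repo
+            ```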
+ + """ + if not path: + self.dircache.clear() + self._repo_and_revision_exists_cache.clear() + else: + resolved_path = self.resolve_path(path) + path = resolved_path.unresolve() + while path: + self.dircache.pop(path, None) + path = self._parent(path) + + # Only clear repo cache if path is to repo root + if not resolved_path.path_in_repo: + self._repo_and_revision_exists_cache.pop((resolved_path.repo_type, resolved_path.repo_id, None), None) + self._repo_and_revision_exists_cache.pop( + (resolved_path.repo_type, resolved_path.repo_id, resolved_path.revision), None + ) + + def _open( + self, + path: str, + mode: str = "rb", + revision: Optional[str] = None, + block_size: Optional[int] = None, + **kwargs, + ) -> "HfFileSystemFile": + block_size = block_size if block_size is not None else self.block_size + if block_size is not None: + kwargs["block_size"] = block_size + if "a" in mode: + raise NotImplementedError("Appending to remote files is not yet supported.") + if block_size == 0: + return HfFileSystemStreamFile(self, path, mode=mode, revision=revision, **kwargs) + else: + return HfFileSystemFile(self, path, mode=mode, revision=revision, **kwargs) + + def _rm(self, path: str, revision: Optional[str] = None, **kwargs) -> None: + resolved_path = self.resolve_path(path, revision=revision) + self._api.delete_file( + path_in_repo=resolved_path.path_in_repo, + repo_id=resolved_path.repo_id, + token=self.token, + repo_type=resolved_path.repo_type, + revision=resolved_path.revision, + commit_message=kwargs.get("commit_message"), + commit_description=kwargs.get("commit_description"), + ) + self.invalidate_cache(path=resolved_path.unresolve()) + + def rm( + self, + path: str, + recursive: bool = False, + maxdepth: Optional[int] = None, + revision: Optional[str] = None, + **kwargs, + ) -> None: + """ + Delete files from a repository. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.rm). + + > [!WARNING] + > Note: When possible, use `HfApi.delete_file()` for better performance. + + Args: + path (`str`): + Path to delete. + recursive (`bool`, *optional*): + If True, delete directory and all its contents. Defaults to False. + maxdepth (`int`, *optional*): + Maximum number of subdirectories to visit when deleting recursively. + revision (`str`, *optional*): + The git revision to delete from. + + """ + resolved_path = self.resolve_path(path, revision=revision) + paths = self.expand_path(path, recursive=recursive, maxdepth=maxdepth, revision=revision) + paths_in_repo = [self.resolve_path(path).path_in_repo for path in paths if not self.isdir(path)] + operations = [CommitOperationDelete(path_in_repo=path_in_repo) for path_in_repo in paths_in_repo] + commit_message = f"Delete {path} " + commit_message += "recursively " if recursive else "" + commit_message += f"up to depth {maxdepth} " if maxdepth is not None else "" + # TODO: use `commit_description` to list all the deleted paths? 
+        self._api.create_commit(
+            repo_id=resolved_path.repo_id,
+            repo_type=resolved_path.repo_type,
+            token=self.token,
+            operations=operations,
+            revision=resolved_path.revision,
+            commit_message=kwargs.get("commit_message", commit_message),
+            commit_description=kwargs.get("commit_description"),
+        )
+        self.invalidate_cache(path=resolved_path.unresolve())
+
+    def ls(
+        self, path: str, detail: bool = True, refresh: bool = False, revision: Optional[str] = None, **kwargs
+    ) -> List[Union[str, Dict[str, Any]]]:
+        """
+        List the contents of a directory.
+
+        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.ls).
+
+        > [!WARNING]
+        > Note: When possible, use `HfApi.list_repo_tree()` for better performance.
+
+        Args:
+            path (`str`):
+                Path to the directory.
+            detail (`bool`, *optional*):
+                If True, returns a list of dictionaries containing file information. If False,
+                returns a list of file paths. Defaults to True.
+            refresh (`bool`, *optional*):
+                If True, bypass the cache and fetch the latest data. Defaults to False.
+            revision (`str`, *optional*):
+                The git revision to list from.
+
+        Returns:
+            `List[Union[str, Dict[str, Any]]]`: List of file paths (if detail=False) or list of file information
+            dictionaries (if detail=True).
+        """
+        resolved_path = self.resolve_path(path, revision=revision)
+        path = resolved_path.unresolve()
+        try:
+            out = self._ls_tree(path, refresh=refresh, revision=revision, **kwargs)
+        except EntryNotFoundError:
+            # Path could be a file
+            if not resolved_path.path_in_repo:
+                _raise_file_not_found(path, None)
+            out = self._ls_tree(self._parent(path), refresh=refresh, revision=revision, **kwargs)
+            out = [o for o in out if o["name"] == path]
+            if len(out) == 0:
+                _raise_file_not_found(path, None)
+        return out if detail else [o["name"] for o in out]
+
+    def _ls_tree(
+        self,
+        path: str,
+        recursive: bool = False,
+        refresh: bool = False,
+        revision: Optional[str] = None,
+        expand_info: bool = False,
+        maxdepth: Optional[int] = None,
+    ):
+        resolved_path = self.resolve_path(path, revision=revision)
+        path = resolved_path.unresolve()
+        root_path = HfFileSystemResolvedPath(
+            resolved_path.repo_type,
+            resolved_path.repo_id,
+            resolved_path.revision,
+            path_in_repo="",
+            _raw_revision=resolved_path._raw_revision,
+        ).unresolve()
+
+        out = []
+        if path in self.dircache and not refresh:
+            cached_path_infos = self.dircache[path]
+            out.extend(cached_path_infos)
+            dirs_not_in_dircache = []
+            if recursive:
+                # Use BFS to traverse the cache and build the "recursive" output
+                # (The Hub uses a so-called "tree first" strategy for the tree endpoint but we sort the output to follow the spec so the result is (eventually) the same)
+                depth = 2
+                dirs_to_visit = deque(
+                    [(depth, path_info) for path_info in cached_path_infos if path_info["type"] == "directory"]
+                )
+                while dirs_to_visit:
+                    depth, dir_info = dirs_to_visit.popleft()
+                    if maxdepth is None or depth <= maxdepth:
+                        if dir_info["name"] not in self.dircache:
+                            dirs_not_in_dircache.append(dir_info["name"])
+                        else:
+                            cached_path_infos = self.dircache[dir_info["name"]]
+                            out.extend(cached_path_infos)
+                            dirs_to_visit.extend(
+                                [
+                                    (depth + 1, path_info)
+                                    for path_info in cached_path_infos
+                                    if path_info["type"] == "directory"
+                                ]
+                            )
+
+            dirs_not_expanded = []
+            if expand_info:
+                # Check if there are directories with non-expanded entries
+                dirs_not_expanded = [self._parent(o["name"]) for o in out if o["last_commit"] is None]
+
+            if (recursive and
dirs_not_in_dircache) or (expand_info and dirs_not_expanded): + # If the dircache is incomplete, find the common path of the missing and non-expanded entries + # and extend the output with the result of `_ls_tree(common_path, recursive=True)` + common_prefix = os.path.commonprefix(dirs_not_in_dircache + dirs_not_expanded) + # Get the parent directory if the common prefix itself is not a directory + common_path = ( + common_prefix.rstrip("/") + if common_prefix.endswith("/") + or common_prefix == root_path + or common_prefix in chain(dirs_not_in_dircache, dirs_not_expanded) + else self._parent(common_prefix) + ) + if maxdepth is not None: + common_path_depth = common_path[len(path) :].count("/") + maxdepth -= common_path_depth + out = [o for o in out if not o["name"].startswith(common_path + "/")] + for cached_path in list(self.dircache): + if cached_path.startswith(common_path + "/"): + self.dircache.pop(cached_path, None) + self.dircache.pop(common_path, None) + out.extend( + self._ls_tree( + common_path, + recursive=recursive, + refresh=True, + revision=revision, + expand_info=expand_info, + maxdepth=maxdepth, + ) + ) + else: + tree = self._api.list_repo_tree( + resolved_path.repo_id, + resolved_path.path_in_repo, + recursive=recursive, + expand=expand_info, + revision=resolved_path.revision, + repo_type=resolved_path.repo_type, + ) + for path_info in tree: + cache_path = root_path + "/" + path_info.path + if isinstance(path_info, RepoFile): + cache_path_info = { + "name": cache_path, + "size": path_info.size, + "type": "file", + "blob_id": path_info.blob_id, + "lfs": path_info.lfs, + "last_commit": path_info.last_commit, + "security": path_info.security, + } + else: + cache_path_info = { + "name": cache_path, + "size": 0, + "type": "directory", + "tree_id": path_info.tree_id, + "last_commit": path_info.last_commit, + } + parent_path = self._parent(cache_path_info["name"]) + self.dircache.setdefault(parent_path, []).append(cache_path_info) + depth = cache_path[len(path) :].count("/") + if maxdepth is None or depth <= maxdepth: + out.append(cache_path_info) + return out + + def walk(self, path: str, *args, **kwargs) -> Iterator[Tuple[str, List[str], List[str]]]: + """ + Return all files below the given path. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.walk). + + Args: + path (`str`): + Root path to list files from. + + Returns: + `Iterator[Tuple[str, List[str], List[str]]]`: An iterator of (path, list of directory names, list of file names) tuples. + """ + path = self.resolve_path(path, revision=kwargs.get("revision")).unresolve() + yield from super().walk(path, *args, **kwargs) + + def glob(self, path: str, **kwargs) -> List[str]: + """ + Find files by glob-matching. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.glob). + + Args: + path (`str`): + Path pattern to match. + + Returns: + `List[str]`: List of paths matching the pattern. + """ + path = self.resolve_path(path, revision=kwargs.get("revision")).unresolve() + return super().glob(path, **kwargs) + + def find( + self, + path: str, + maxdepth: Optional[int] = None, + withdirs: bool = False, + detail: bool = False, + refresh: bool = False, + revision: Optional[str] = None, + **kwargs, + ) -> Union[List[str], Dict[str, Dict[str, Any]]]: + """ + List all files below path. 
+ + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.find). + + Args: + path (`str`): + Root path to list files from. + maxdepth (`int`, *optional*): + Maximum depth to descend into subdirectories. + withdirs (`bool`, *optional*): + Include directory paths in the output. Defaults to False. + detail (`bool`, *optional*): + If True, returns a dict mapping paths to file information. Defaults to False. + refresh (`bool`, *optional*): + If True, bypass the cache and fetch the latest data. Defaults to False. + revision (`str`, *optional*): + The git revision to list from. + + Returns: + `Union[List[str], Dict[str, Dict[str, Any]]]`: List of paths or dict of file information. + """ + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + resolved_path = self.resolve_path(path, revision=revision) + path = resolved_path.unresolve() + try: + out = self._ls_tree( + path, recursive=True, refresh=refresh, revision=resolved_path.revision, maxdepth=maxdepth, **kwargs + ) + except EntryNotFoundError: + # Path could be a file + try: + if self.info(path, revision=revision, **kwargs)["type"] == "file": + out = {path: {}} + else: + out = {} + except FileNotFoundError: + out = {} + else: + if not withdirs: + out = [o for o in out if o["type"] != "directory"] + else: + # If `withdirs=True`, include the directory itself to be consistent with the spec + path_info = self.info(path, revision=resolved_path.revision, **kwargs) + out = [path_info] + out if path_info["type"] == "directory" else out + out = {o["name"]: o for o in out} + names = sorted(out) + if not detail: + return names + else: + return {name: out[name] for name in names} + + def cp_file(self, path1: str, path2: str, revision: Optional[str] = None, **kwargs) -> None: + """ + Copy a file within or between repositories. + + > [!WARNING] + > Note: When possible, use `HfApi.upload_file()` for better performance. + + Args: + path1 (`str`): + Source path to copy from. + path2 (`str`): + Destination path to copy to. + revision (`str`, *optional*): + The git revision to copy from. 
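+
+        Example (a minimal sketch; repo and file names are placeholders):
+
+            ```python
+            >>> from huggingface_hub import HfFileSystem
+            >>> fs = HfFileSystem()
+            >>> fs.cp_file("my-username/my-model/README.md", "my-username/my-model/README-copy.md")
+            ```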
+ + """ + resolved_path1 = self.resolve_path(path1, revision=revision) + resolved_path2 = self.resolve_path(path2, revision=revision) + + same_repo = ( + resolved_path1.repo_type == resolved_path2.repo_type and resolved_path1.repo_id == resolved_path2.repo_id + ) + + if same_repo: + commit_message = f"Copy {path1} to {path2}" + self._api.create_commit( + repo_id=resolved_path1.repo_id, + repo_type=resolved_path1.repo_type, + revision=resolved_path2.revision, + commit_message=kwargs.get("commit_message", commit_message), + commit_description=kwargs.get("commit_description", ""), + operations=[ + CommitOperationCopy( + src_path_in_repo=resolved_path1.path_in_repo, + path_in_repo=resolved_path2.path_in_repo, + src_revision=resolved_path1.revision, + ) + ], + ) + else: + with self.open(path1, "rb", revision=resolved_path1.revision) as f: + content = f.read() + commit_message = f"Copy {path1} to {path2}" + self._api.upload_file( + path_or_fileobj=content, + path_in_repo=resolved_path2.path_in_repo, + repo_id=resolved_path2.repo_id, + token=self.token, + repo_type=resolved_path2.repo_type, + revision=resolved_path2.revision, + commit_message=kwargs.get("commit_message", commit_message), + commit_description=kwargs.get("commit_description"), + ) + self.invalidate_cache(path=resolved_path1.unresolve()) + self.invalidate_cache(path=resolved_path2.unresolve()) + + def modified(self, path: str, **kwargs) -> datetime: + """ + Get the last modified time of a file. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.modified). + + Args: + path (`str`): + Path to the file. + + Returns: + `datetime`: Last commit date of the file. + """ + info = self.info(path, **{**kwargs, "expand_info": True}) + return info["last_commit"]["date"] + + def info(self, path: str, refresh: bool = False, revision: Optional[str] = None, **kwargs) -> Dict[str, Any]: + """ + Get information about a file or directory. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.info). + + > [!WARNING] + > Note: When possible, use `HfApi.get_paths_info()` or `HfApi.repo_info()` for better performance. + + Args: + path (`str`): + Path to get info for. + refresh (`bool`, *optional*): + If True, bypass the cache and fetch the latest data. Defaults to False. + revision (`str`, *optional*): + The git revision to get info from. + + Returns: + `Dict[str, Any]`: Dictionary containing file information (type, size, commit info, etc.). + + """ + resolved_path = self.resolve_path(path, revision=revision) + path = resolved_path.unresolve() + expand_info = kwargs.get( + "expand_info", False + ) # don't expose it as a parameter in the public API to follow the spec + if not resolved_path.path_in_repo: + # Path is the root directory + out = { + "name": path, + "size": 0, + "type": "directory", + "last_commit": None, + } + if expand_info: + last_commit = self._api.list_repo_commits( + resolved_path.repo_id, repo_type=resolved_path.repo_type, revision=resolved_path.revision + )[-1] + out = { + **out, + "tree_id": None, # TODO: tree_id of the root directory? 
+ "last_commit": LastCommitInfo( + oid=last_commit.commit_id, title=last_commit.title, date=last_commit.created_at + ), + } + else: + out = None + parent_path = self._parent(path) + if not expand_info and parent_path not in self.dircache: + # Fill the cache with cheap call + self.ls(parent_path) + if parent_path in self.dircache: + # Check if the path is in the cache + out1 = [o for o in self.dircache[parent_path] if o["name"] == path] + if not out1: + _raise_file_not_found(path, None) + out = out1[0] + if refresh or out is None or (expand_info and out and out["last_commit"] is None): + paths_info = self._api.get_paths_info( + resolved_path.repo_id, + resolved_path.path_in_repo, + expand=expand_info, + revision=resolved_path.revision, + repo_type=resolved_path.repo_type, + ) + if not paths_info: + _raise_file_not_found(path, None) + path_info = paths_info[0] + root_path = HfFileSystemResolvedPath( + resolved_path.repo_type, + resolved_path.repo_id, + resolved_path.revision, + path_in_repo="", + _raw_revision=resolved_path._raw_revision, + ).unresolve() + if isinstance(path_info, RepoFile): + out = { + "name": root_path + "/" + path_info.path, + "size": path_info.size, + "type": "file", + "blob_id": path_info.blob_id, + "lfs": path_info.lfs, + "last_commit": path_info.last_commit, + "security": path_info.security, + } + else: + out = { + "name": root_path + "/" + path_info.path, + "size": 0, + "type": "directory", + "tree_id": path_info.tree_id, + "last_commit": path_info.last_commit, + } + if not expand_info: + out = {k: out[k] for k in ["name", "size", "type"]} + assert out is not None + return out + + def exists(self, path, **kwargs): + """ + Check if a file exists. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.exists). + + > [!WARNING] + > Note: When possible, use `HfApi.file_exists()` for better performance. + + Args: + path (`str`): + Path to check. + + Returns: + `bool`: True if file exists, False otherwise. + """ + try: + if kwargs.get("refresh", False): + self.invalidate_cache(path) + + self.info(path, **kwargs) + return True + except: # noqa: E722 + return False + + def isdir(self, path): + """ + Check if a path is a directory. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.isdir). + + Args: + path (`str`): + Path to check. + + Returns: + `bool`: True if path is a directory, False otherwise. + """ + try: + return self.info(path)["type"] == "directory" + except OSError: + return False + + def isfile(self, path): + """ + Check if a path is a file. + + For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.isfile). + + Args: + path (`str`): + Path to check. + + Returns: + `bool`: True if path is a file, False otherwise. + """ + try: + return self.info(path)["type"] == "file" + except: # noqa: E722 + return False + + def url(self, path: str) -> str: + """ + Get the HTTP URL of the given path. + + Args: + path (`str`): + Path to get URL for. + + Returns: + `str`: HTTP URL to access the file or directory on the Hub. 
+ """ + resolved_path = self.resolve_path(path) + url = hf_hub_url( + resolved_path.repo_id, + resolved_path.path_in_repo, + repo_type=resolved_path.repo_type, + revision=resolved_path.revision, + endpoint=self.endpoint, + ) + if self.isdir(path): + url = url.replace("/resolve/", "/tree/", 1) + return url + + def get_file(self, rpath, lpath, callback=_DEFAULT_CALLBACK, outfile=None, **kwargs) -> None: + """ + Copy single remote file to local. + + > [!WARNING] + > Note: When possible, use `HfApi.hf_hub_download()` for better performance. + + Args: + rpath (`str`): + Remote path to download from. + lpath (`str`): + Local path to download to. + callback (`Callback`, *optional*): + Optional callback to track download progress. Defaults to no callback. + outfile (`IO`, *optional*): + Optional file-like object to write to. If provided, `lpath` is ignored. + + """ + revision = kwargs.get("revision") + unhandled_kwargs = set(kwargs.keys()) - {"revision"} + if not isinstance(callback, (NoOpCallback, TqdmCallback)) or len(unhandled_kwargs) > 0: + # for now, let's not handle custom callbacks + # and let's not handle custom kwargs + return super().get_file(rpath, lpath, callback=callback, outfile=outfile, **kwargs) + + # Taken from https://github.com/fsspec/filesystem_spec/blob/47b445ae4c284a82dd15e0287b1ffc410e8fc470/fsspec/spec.py#L883 + if isfilelike(lpath): + outfile = lpath + elif self.isdir(rpath): + os.makedirs(lpath, exist_ok=True) + return None + + if isinstance(lpath, (str, Path)): # otherwise, let's assume it's a file-like object + os.makedirs(os.path.dirname(lpath), exist_ok=True) + + # Open file if not already open + close_file = False + if outfile is None: + outfile = open(lpath, "wb") + close_file = True + initial_pos = outfile.tell() + + # Custom implementation of `get_file` to use `http_get`. + resolve_remote_path = self.resolve_path(rpath, revision=revision) + expected_size = self.info(rpath, revision=revision)["size"] + callback.set_size(expected_size) + try: + http_get( + url=hf_hub_url( + repo_id=resolve_remote_path.repo_id, + revision=resolve_remote_path.revision, + filename=resolve_remote_path.path_in_repo, + repo_type=resolve_remote_path.repo_type, + endpoint=self.endpoint, + ), + temp_file=outfile, # type: ignore[arg-type] + displayed_filename=rpath, + expected_size=expected_size, + resume_size=0, + headers=self._api._build_hf_headers(), + _tqdm_bar=callback.tqdm if isinstance(callback, TqdmCallback) else None, + ) + outfile.seek(initial_pos) + finally: + # Close file only if we opened it ourselves + if close_file: + outfile.close() + + @property + def transaction(self): + """A context within which files are committed together upon exit + + Requires the file class to implement `.commit()` and `.discard()` + for the normal and exception cases. 
+ """ + # Taken from https://github.com/fsspec/filesystem_spec/blob/3fbb6fee33b46cccb015607630843dea049d3243/fsspec/spec.py#L231 + # See https://github.com/huggingface/huggingface_hub/issues/1733 + raise NotImplementedError("Transactional commits are not supported.") + + def start_transaction(self): + """Begin write transaction for deferring files, non-context version""" + # Taken from https://github.com/fsspec/filesystem_spec/blob/3fbb6fee33b46cccb015607630843dea049d3243/fsspec/spec.py#L241 + # See https://github.com/huggingface/huggingface_hub/issues/1733 + raise NotImplementedError("Transactional commits are not supported.") + + def __reduce__(self): + # re-populate the instance cache at HfFileSystem._cache and re-populate the cache attributes of every instance + return make_instance, ( + type(self), + self.storage_args, + self.storage_options, + { + "dircache": self.dircache, + "_repo_and_revision_exists_cache": self._repo_and_revision_exists_cache, + }, + ) + + +class HfFileSystemFile(fsspec.spec.AbstractBufferedFile): + def __init__(self, fs: HfFileSystem, path: str, revision: Optional[str] = None, **kwargs): + try: + self.resolved_path = fs.resolve_path(path, revision=revision) + except FileNotFoundError as e: + if "w" in kwargs.get("mode", ""): + raise FileNotFoundError( + f"{e}.\nMake sure the repository and revision exist before writing data." + ) from e + raise + super().__init__(fs, self.resolved_path.unresolve(), **kwargs) + self.fs: HfFileSystem + + def __del__(self): + if not hasattr(self, "resolved_path"): + # Means that the constructor failed. Nothing to do. + return + return super().__del__() + + def _fetch_range(self, start: int, end: int) -> bytes: + headers = { + "range": f"bytes={start}-{end - 1}", + **self.fs._api._build_hf_headers(), + } + url = hf_hub_url( + repo_id=self.resolved_path.repo_id, + revision=self.resolved_path.revision, + filename=self.resolved_path.path_in_repo, + repo_type=self.resolved_path.repo_type, + endpoint=self.fs.endpoint, + ) + r = http_backoff("GET", url, headers=headers, timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT) + hf_raise_for_status(r) + return r.content + + def _initiate_upload(self) -> None: + self.temp_file = tempfile.NamedTemporaryFile(prefix="hffs-", delete=False) + + def _upload_chunk(self, final: bool = False) -> None: + self.buffer.seek(0) + block = self.buffer.read() + self.temp_file.write(block) + if final: + self.temp_file.close() + self.fs._api.upload_file( + path_or_fileobj=self.temp_file.name, + path_in_repo=self.resolved_path.path_in_repo, + repo_id=self.resolved_path.repo_id, + token=self.fs.token, + repo_type=self.resolved_path.repo_type, + revision=self.resolved_path.revision, + commit_message=self.kwargs.get("commit_message"), + commit_description=self.kwargs.get("commit_description"), + ) + os.remove(self.temp_file.name) + self.fs.invalidate_cache( + path=self.resolved_path.unresolve(), + ) + + def read(self, length=-1): + """Read remote file. + + If `length` is not provided or is -1, the entire file is downloaded and read. On POSIX systems and if + `hf_transfer` is not enabled, the file is loaded in memory directly. Otherwise, the file is downloaded to a + temporary file and read from there. 
+ """ + if self.mode == "rb" and (length is None or length == -1) and self.loc == 0: + with self.fs.open(self.path, "rb", block_size=0) as f: # block_size=0 enables fast streaming + out = f.read() + self.loc += len(out) + return out + return super().read(length) + + def url(self) -> str: + return self.fs.url(self.path) + + +class HfFileSystemStreamFile(fsspec.spec.AbstractBufferedFile): + def __init__( + self, + fs: HfFileSystem, + path: str, + mode: str = "rb", + revision: Optional[str] = None, + block_size: int = 0, + cache_type: str = "none", + **kwargs, + ): + if block_size != 0: + raise ValueError(f"HfFileSystemStreamFile only supports block_size=0 but got {block_size}") + if cache_type != "none": + raise ValueError(f"HfFileSystemStreamFile only supports cache_type='none' but got {cache_type}") + if "w" in mode: + raise ValueError(f"HfFileSystemStreamFile only supports reading but got mode='{mode}'") + try: + self.resolved_path = fs.resolve_path(path, revision=revision) + except FileNotFoundError as e: + if "w" in kwargs.get("mode", ""): + raise FileNotFoundError( + f"{e}.\nMake sure the repository and revision exist before writing data." + ) from e + # avoid an unnecessary .info() call to instantiate .details + self.details = {"name": self.resolved_path.unresolve(), "size": None} + super().__init__( + fs, self.resolved_path.unresolve(), mode=mode, block_size=block_size, cache_type=cache_type, **kwargs + ) + self.response: Optional[Response] = None + self.fs: HfFileSystem + + def seek(self, loc: int, whence: int = 0): + if loc == 0 and whence == 1: + return + if loc == self.loc and whence == 0: + return + raise ValueError("Cannot seek streaming HF file") + + def read(self, length: int = -1): + read_args = (length,) if length >= 0 else () + if self.response is None: + url = hf_hub_url( + repo_id=self.resolved_path.repo_id, + revision=self.resolved_path.revision, + filename=self.resolved_path.path_in_repo, + repo_type=self.resolved_path.repo_type, + endpoint=self.fs.endpoint, + ) + self.response = http_backoff( + "GET", + url, + headers=self.fs._api._build_hf_headers(), + stream=True, + timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT, + ) + hf_raise_for_status(self.response) + try: + self.response.raw.decode_content = True + out = self.response.raw.read(*read_args) + except Exception: + self.response.close() + + # Retry by recreating the connection + url = hf_hub_url( + repo_id=self.resolved_path.repo_id, + revision=self.resolved_path.revision, + filename=self.resolved_path.path_in_repo, + repo_type=self.resolved_path.repo_type, + endpoint=self.fs.endpoint, + ) + self.response = http_backoff( + "GET", + url, + headers={"Range": "bytes=%d-" % self.loc, **self.fs._api._build_hf_headers()}, + stream=True, + timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT, + ) + hf_raise_for_status(self.response) + try: + self.response.raw.decode_content = True + out = self.response.raw.read(*read_args) + except Exception: + self.response.close() + raise + self.loc += len(out) + return out + + def url(self) -> str: + return self.fs.url(self.path) + + def __del__(self): + if not hasattr(self, "resolved_path"): + # Means that the constructor failed. Nothing to do. 
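+            # (`fs.resolve_path` can raise inside `__init__`, in which case `__del__`
+            # still runs on the half-initialized object; return early rather than raise)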
+ return + return super().__del__() + + def __reduce__(self): + return reopen, (self.fs, self.path, self.mode, self.blocksize, self.cache.name) + + +def safe_revision(revision: str) -> str: + return revision if SPECIAL_REFS_REVISION_REGEX.match(revision) else safe_quote(revision) + + +def safe_quote(s: str) -> str: + return quote(s, safe="") + + +def _raise_file_not_found(path: str, err: Optional[Exception]) -> NoReturn: + msg = path + if isinstance(err, RepositoryNotFoundError): + msg = f"{path} (repository not found)" + elif isinstance(err, RevisionNotFoundError): + msg = f"{path} (revision not found)" + elif isinstance(err, HFValidationError): + msg = f"{path} (invalid repository id)" + raise FileNotFoundError(msg) from err + + +def reopen(fs: HfFileSystem, path: str, mode: str, block_size: int, cache_type: str): + return fs.open(path, mode=mode, block_size=block_size, cache_type=cache_type) + + +def make_instance(cls, args, kwargs, instance_cache_attributes_dict): + fs = cls(*args, **kwargs) + for attr, cached_value in instance_cache_attributes_dict.items(): + setattr(fs, attr, cached_value) + return fs diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/hub_mixin.py b/venv/lib/python3.13/site-packages/huggingface_hub/hub_mixin.py new file mode 100644 index 0000000000000000000000000000000000000000..9fa702ceda97318a817cb1a325223e26a78e2710 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/hub_mixin.py @@ -0,0 +1,853 @@ +import inspect +import json +import os +from dataclasses import Field, asdict, dataclass, is_dataclass +from pathlib import Path +from typing import Any, Callable, ClassVar, Dict, List, Optional, Protocol, Tuple, Type, TypeVar, Union + +import packaging.version + +from . import constants +from .errors import EntryNotFoundError, HfHubHTTPError +from .file_download import hf_hub_download +from .hf_api import HfApi +from .repocard import ModelCard, ModelCardData +from .utils import ( + SoftTemporaryDirectory, + is_jsonable, + is_safetensors_available, + is_simple_optional_type, + is_torch_available, + logging, + unwrap_simple_optional_type, + validate_hf_hub_args, +) + + +if is_torch_available(): + import torch # type: ignore + +if is_safetensors_available(): + import safetensors + from safetensors.torch import load_model as load_model_as_safetensor + from safetensors.torch import save_model as save_model_as_safetensor + + +logger = logging.get_logger(__name__) + + +# Type alias for dataclass instances, copied from https://github.com/python/typeshed/blob/9f28171658b9ca6c32a7cb93fbb99fc92b17858b/stdlib/_typeshed/__init__.pyi#L349 +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[Dict[str, Field]] + + +# Generic variable that is either ModelHubMixin or a subclass thereof +T = TypeVar("T", bound="ModelHubMixin") +# Generic variable to represent an args type +ARGS_T = TypeVar("ARGS_T") +ENCODER_T = Callable[[ARGS_T], Any] +DECODER_T = Callable[[Any], ARGS_T] +CODER_T = Tuple[ENCODER_T, DECODER_T] + + +DEFAULT_MODEL_CARD = """ +--- +# For reference on model card metadata, see the spec: https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1 +# Doc / guide: https://huggingface.co/docs/hub/model-cards +{{ card_data }} +--- + +This model has been pushed to the Hub using the [PytorchModelHubMixin](https://huggingface.co/docs/huggingface_hub/package_reference/mixins#huggingface_hub.PyTorchModelHubMixin) integration: +- Code: {{ repo_url | default("[More Information Needed]", true) }} +- Paper: {{ paper_url | 
default("[More Information Needed]", true) }} +- Docs: {{ docs_url | default("[More Information Needed]", true) }} +""" + + +@dataclass +class MixinInfo: + model_card_template: str + model_card_data: ModelCardData + docs_url: Optional[str] = None + paper_url: Optional[str] = None + repo_url: Optional[str] = None + + +class ModelHubMixin: + """ + A generic mixin to integrate ANY machine learning framework with the Hub. + + To integrate your framework, your model class must inherit from this class. Custom logic for saving/loading models + have to be overwritten in [`_from_pretrained`] and [`_save_pretrained`]. [`PyTorchModelHubMixin`] is a good example + of mixin integration with the Hub. Check out our [integration guide](../guides/integrations) for more instructions. + + When inheriting from [`ModelHubMixin`], you can define class-level attributes. These attributes are not passed to + `__init__` but to the class definition itself. This is useful to define metadata about the library integrating + [`ModelHubMixin`]. + + For more details on how to integrate the mixin with your library, checkout the [integration guide](../guides/integrations). + + Args: + repo_url (`str`, *optional*): + URL of the library repository. Used to generate model card. + paper_url (`str`, *optional*): + URL of the library paper. Used to generate model card. + docs_url (`str`, *optional*): + URL of the library documentation. Used to generate model card. + model_card_template (`str`, *optional*): + Template of the model card. Used to generate model card. Defaults to a generic template. + language (`str` or `List[str]`, *optional*): + Language supported by the library. Used to generate model card. + library_name (`str`, *optional*): + Name of the library integrating ModelHubMixin. Used to generate model card. + license (`str`, *optional*): + License of the library integrating ModelHubMixin. Used to generate model card. + E.g: "apache-2.0" + license_name (`str`, *optional*): + Name of the library integrating ModelHubMixin. Used to generate model card. + Only used if `license` is set to `other`. + E.g: "coqui-public-model-license". + license_link (`str`, *optional*): + URL to the license of the library integrating ModelHubMixin. Used to generate model card. + Only used if `license` is set to `other` and `license_name` is set. + E.g: "https://coqui.ai/cpml". + pipeline_tag (`str`, *optional*): + Tag of the pipeline. Used to generate model card. E.g. "text-classification". + tags (`List[str]`, *optional*): + Tags to be added to the model card. Used to generate model card. E.g. ["computer-vision"] + coders (`Dict[Type, Tuple[Callable, Callable]]`, *optional*): + Dictionary of custom types and their encoders/decoders. Used to encode/decode arguments that are not + jsonable by default. E.g dataclasses, argparse.Namespace, OmegaConf, etc. + + Example: + + ```python + >>> from huggingface_hub import ModelHubMixin + + # Inherit from ModelHubMixin + >>> class MyCustomModel( + ... ModelHubMixin, + ... library_name="my-library", + ... tags=["computer-vision"], + ... repo_url="https://github.com/huggingface/my-cool-library", + ... paper_url="https://arxiv.org/abs/2304.12244", + ... docs_url="https://huggingface.co/docs/my-cool-library", + ... # ^ optional metadata to generate model card + ... ): + ... def __init__(self, size: int = 512, device: str = "cpu"): + ... # define how to initialize your model + ... super().__init__() + ... ... + ... + ... def _save_pretrained(self, save_directory: Path) -> None: + ... 
# define how to serialize your model + ... ... + ... + ... @classmethod + ... def from_pretrained( + ... cls: Type[T], + ... pretrained_model_name_or_path: Union[str, Path], + ... *, + ... force_download: bool = False, + ... resume_download: Optional[bool] = None, + ... proxies: Optional[Dict] = None, + ... token: Optional[Union[str, bool]] = None, + ... cache_dir: Optional[Union[str, Path]] = None, + ... local_files_only: bool = False, + ... revision: Optional[str] = None, + ... **model_kwargs, + ... ) -> T: + ... # define how to deserialize your model + ... ... + + >>> model = MyCustomModel(size=256, device="gpu") + + # Save model weights to local directory + >>> model.save_pretrained("my-awesome-model") + + # Push model weights to the Hub + >>> model.push_to_hub("my-awesome-model") + + # Download and initialize weights from the Hub + >>> reloaded_model = MyCustomModel.from_pretrained("username/my-awesome-model") + >>> reloaded_model.size + 256 + + # Model card has been correctly populated + >>> from huggingface_hub import ModelCard + >>> card = ModelCard.load("username/my-awesome-model") + >>> card.data.tags + ["x-custom-tag", "pytorch_model_hub_mixin", "model_hub_mixin"] + >>> card.data.library_name + "my-library" + ``` + """ + + _hub_mixin_config: Optional[Union[dict, DataclassInstance]] = None + # ^ optional config attribute automatically set in `from_pretrained` + _hub_mixin_info: MixinInfo + # ^ information about the library integrating ModelHubMixin (used to generate model card) + _hub_mixin_inject_config: bool # whether `_from_pretrained` expects `config` or not + _hub_mixin_init_parameters: Dict[str, inspect.Parameter] # __init__ parameters + _hub_mixin_jsonable_default_values: Dict[str, Any] # default values for __init__ parameters + _hub_mixin_jsonable_custom_types: Tuple[Type, ...] # custom types that can be encoded/decoded + _hub_mixin_coders: Dict[Type, CODER_T] # encoders/decoders for custom types + # ^ internal values to handle config + + def __init_subclass__( + cls, + *, + # Generic info for model card + repo_url: Optional[str] = None, + paper_url: Optional[str] = None, + docs_url: Optional[str] = None, + # Model card template + model_card_template: str = DEFAULT_MODEL_CARD, + # Model card metadata + language: Optional[List[str]] = None, + library_name: Optional[str] = None, + license: Optional[str] = None, + license_name: Optional[str] = None, + license_link: Optional[str] = None, + pipeline_tag: Optional[str] = None, + tags: Optional[List[str]] = None, + # How to encode/decode arguments with custom type into a JSON config? + coders: Optional[ + Dict[Type, CODER_T] + # Key is a type. + # Value is a tuple (encoder, decoder). 
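+            # The encoder maps a value of that type to something JSON-serializable;
+            # the decoder rebuilds the value from that JSON data when a config is loaded.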
+ # Example: {MyCustomType: (lambda x: x.value, lambda data: MyCustomType(data))} + ] = None, + ) -> None: + """Inspect __init__ signature only once when subclassing + handle modelcard.""" + super().__init_subclass__() + + # Will be reused when creating modelcard + tags = tags or [] + tags.append("model_hub_mixin") + + # Initialize MixinInfo if not existent + info = MixinInfo(model_card_template=model_card_template, model_card_data=ModelCardData()) + + # If parent class has a MixinInfo, inherit from it as a copy + if hasattr(cls, "_hub_mixin_info"): + # Inherit model card template from parent class if not explicitly set + if model_card_template == DEFAULT_MODEL_CARD: + info.model_card_template = cls._hub_mixin_info.model_card_template + + # Inherit from parent model card data + info.model_card_data = ModelCardData(**cls._hub_mixin_info.model_card_data.to_dict()) + + # Inherit other info + info.docs_url = cls._hub_mixin_info.docs_url + info.paper_url = cls._hub_mixin_info.paper_url + info.repo_url = cls._hub_mixin_info.repo_url + cls._hub_mixin_info = info + + # Update MixinInfo with metadata + if model_card_template is not None and model_card_template != DEFAULT_MODEL_CARD: + info.model_card_template = model_card_template + if repo_url is not None: + info.repo_url = repo_url + if paper_url is not None: + info.paper_url = paper_url + if docs_url is not None: + info.docs_url = docs_url + if language is not None: + info.model_card_data.language = language + if library_name is not None: + info.model_card_data.library_name = library_name + if license is not None: + info.model_card_data.license = license + if license_name is not None: + info.model_card_data.license_name = license_name + if license_link is not None: + info.model_card_data.license_link = license_link + if pipeline_tag is not None: + info.model_card_data.pipeline_tag = pipeline_tag + if tags is not None: + normalized_tags = list(tags) + if info.model_card_data.tags is not None: + info.model_card_data.tags.extend(normalized_tags) + else: + info.model_card_data.tags = normalized_tags + + if info.model_card_data.tags is not None: + info.model_card_data.tags = sorted(set(info.model_card_data.tags)) + + # Handle encoders/decoders for args + cls._hub_mixin_coders = coders or {} + cls._hub_mixin_jsonable_custom_types = tuple(cls._hub_mixin_coders.keys()) + + # Inspect __init__ signature to handle config + cls._hub_mixin_init_parameters = dict(inspect.signature(cls.__init__).parameters) + cls._hub_mixin_jsonable_default_values = { + param.name: cls._encode_arg(param.default) + for param in cls._hub_mixin_init_parameters.values() + if param.default is not inspect.Parameter.empty and cls._is_jsonable(param.default) + } + cls._hub_mixin_inject_config = "config" in inspect.signature(cls._from_pretrained).parameters + + def __new__(cls: Type[T], *args, **kwargs) -> T: + """Create a new instance of the class and handle config. + + 3 cases: + - If `self._hub_mixin_config` is already set, do nothing. + - If `config` is passed as a dataclass, set it as `self._hub_mixin_config`. + - Otherwise, build `self._hub_mixin_config` from default values and passed values. 
+ """ + instance = super().__new__(cls) + + # If `config` is already set, return early + if instance._hub_mixin_config is not None: + return instance + + # Infer passed values + passed_values = { + **{ + key: value + for key, value in zip( + # [1:] to skip `self` parameter + list(cls._hub_mixin_init_parameters)[1:], + args, + ) + }, + **kwargs, + } + + # If config passed as dataclass => set it and return early + if is_dataclass(passed_values.get("config")): + instance._hub_mixin_config = passed_values["config"] + return instance + + # Otherwise, build config from default + passed values + init_config = { + # default values + **cls._hub_mixin_jsonable_default_values, + # passed values + **{ + key: cls._encode_arg(value) # Encode custom types as jsonable value + for key, value in passed_values.items() + if instance._is_jsonable(value) # Only if jsonable or we have a custom encoder + }, + } + passed_config = init_config.pop("config", {}) + + # Populate `init_config` with provided config + if isinstance(passed_config, dict): + init_config.update(passed_config) + + # Set `config` attribute and return + if init_config != {}: + instance._hub_mixin_config = init_config + return instance + + @classmethod + def _is_jsonable(cls, value: Any) -> bool: + """Check if a value is JSON serializable.""" + if is_dataclass(value): + return True + if isinstance(value, cls._hub_mixin_jsonable_custom_types): + return True + return is_jsonable(value) + + @classmethod + def _encode_arg(cls, arg: Any) -> Any: + """Encode an argument into a JSON serializable format.""" + if is_dataclass(arg): + return asdict(arg) # type: ignore[arg-type] + for type_, (encoder, _) in cls._hub_mixin_coders.items(): + if isinstance(arg, type_): + if arg is None: + return None + return encoder(arg) + return arg + + @classmethod + def _decode_arg(cls, expected_type: Type[ARGS_T], value: Any) -> Optional[ARGS_T]: + """Decode a JSON serializable value into an argument.""" + if is_simple_optional_type(expected_type): + if value is None: + return None + expected_type = unwrap_simple_optional_type(expected_type) + # Dataclass => handle it + if is_dataclass(expected_type): + return _load_dataclass(expected_type, value) # type: ignore[return-value] + # Otherwise => check custom decoders + for type_, (_, decoder) in cls._hub_mixin_coders.items(): + if inspect.isclass(expected_type) and issubclass(expected_type, type_): + return decoder(value) + # Otherwise => don't decode + return value + + def save_pretrained( + self, + save_directory: Union[str, Path], + *, + config: Optional[Union[dict, DataclassInstance]] = None, + repo_id: Optional[str] = None, + push_to_hub: bool = False, + model_card_kwargs: Optional[Dict[str, Any]] = None, + **push_to_hub_kwargs, + ) -> Optional[str]: + """ + Save weights in local directory. + + Args: + save_directory (`str` or `Path`): + Path to directory in which the model weights and configuration will be saved. + config (`dict` or `DataclassInstance`, *optional*): + Model configuration specified as a key/value dictionary or a dataclass instance. + push_to_hub (`bool`, *optional*, defaults to `False`): + Whether or not to push your model to the Huggingface Hub after saving it. + repo_id (`str`, *optional*): + ID of your repository on the Hub. Used only if `push_to_hub=True`. Will default to the folder name if + not provided. + model_card_kwargs (`Dict[str, Any]`, *optional*): + Additional arguments passed to the model card template to customize the model card. 
+ push_to_hub_kwargs: + Additional key word arguments passed along to the [`~ModelHubMixin.push_to_hub`] method. + Returns: + `str` or `None`: url of the commit on the Hub if `push_to_hub=True`, `None` otherwise. + """ + save_directory = Path(save_directory) + save_directory.mkdir(parents=True, exist_ok=True) + + # Remove config.json if already exists. After `_save_pretrained` we don't want to overwrite config.json + # as it might have been saved by the custom `_save_pretrained` already. However we do want to overwrite + # an existing config.json if it was not saved by `_save_pretrained`. + config_path = save_directory / constants.CONFIG_NAME + config_path.unlink(missing_ok=True) + + # save model weights/files (framework-specific) + self._save_pretrained(save_directory) + + # save config (if provided and if not serialized yet in `_save_pretrained`) + if config is None: + config = self._hub_mixin_config + if config is not None: + if is_dataclass(config): + config = asdict(config) # type: ignore[arg-type] + if not config_path.exists(): + config_str = json.dumps(config, sort_keys=True, indent=2) + config_path.write_text(config_str) + + # save model card + model_card_path = save_directory / "README.md" + model_card_kwargs = model_card_kwargs if model_card_kwargs is not None else {} + if not model_card_path.exists(): # do not overwrite if already exists + self.generate_model_card(**model_card_kwargs).save(save_directory / "README.md") + + # push to the Hub if required + if push_to_hub: + kwargs = push_to_hub_kwargs.copy() # soft-copy to avoid mutating input + if config is not None: # kwarg for `push_to_hub` + kwargs["config"] = config + if repo_id is None: + repo_id = save_directory.name # Defaults to `save_directory` name + return self.push_to_hub(repo_id=repo_id, model_card_kwargs=model_card_kwargs, **kwargs) + return None + + def _save_pretrained(self, save_directory: Path) -> None: + """ + Overwrite this method in subclass to define how to save your model. + Check out our [integration guide](../guides/integrations) for instructions. + + Args: + save_directory (`str` or `Path`): + Path to directory in which the model weights and configuration will be saved. + """ + raise NotImplementedError + + @classmethod + @validate_hf_hub_args + def from_pretrained( + cls: Type[T], + pretrained_model_name_or_path: Union[str, Path], + *, + force_download: bool = False, + resume_download: Optional[bool] = None, + proxies: Optional[Dict] = None, + token: Optional[Union[str, bool]] = None, + cache_dir: Optional[Union[str, Path]] = None, + local_files_only: bool = False, + revision: Optional[str] = None, + **model_kwargs, + ) -> T: + """ + Download a model from the Huggingface Hub and instantiate it. + + Args: + pretrained_model_name_or_path (`str`, `Path`): + - Either the `model_id` (string) of a model hosted on the Hub, e.g. `bigscience/bloom`. + - Or a path to a `directory` containing model weights saved using + [`~transformers.PreTrainedModel.save_pretrained`], e.g., `../path/to/my_model_directory/`. + revision (`str`, *optional*): + Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. + Defaults to the latest commit on `main` branch. + force_download (`bool`, *optional*, defaults to `False`): + Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding + the existing cache. 
+ proxies (`Dict[str, str]`, *optional*): + A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', + 'http://hostname': 'foo.bar:4012'}`. The proxies are used on every request. + token (`str` or `bool`, *optional*): + The token to use as HTTP bearer authorization for remote files. By default, it will use the token + cached when running `hf auth login`. + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + local_files_only (`bool`, *optional*, defaults to `False`): + If `True`, avoid downloading the file and return the path to the local cached file if it exists. + model_kwargs (`Dict`, *optional*): + Additional kwargs to pass to the model during initialization. + """ + model_id = str(pretrained_model_name_or_path) + config_file: Optional[str] = None + if os.path.isdir(model_id): + if constants.CONFIG_NAME in os.listdir(model_id): + config_file = os.path.join(model_id, constants.CONFIG_NAME) + else: + logger.warning(f"{constants.CONFIG_NAME} not found in {Path(model_id).resolve()}") + else: + try: + config_file = hf_hub_download( + repo_id=model_id, + filename=constants.CONFIG_NAME, + revision=revision, + cache_dir=cache_dir, + force_download=force_download, + proxies=proxies, + resume_download=resume_download, + token=token, + local_files_only=local_files_only, + ) + except HfHubHTTPError as e: + logger.info(f"{constants.CONFIG_NAME} not found on the HuggingFace Hub: {str(e)}") + + # Read config + config = None + if config_file is not None: + with open(config_file, "r", encoding="utf-8") as f: + config = json.load(f) + + # Decode custom types in config + for key, value in config.items(): + if key in cls._hub_mixin_init_parameters: + expected_type = cls._hub_mixin_init_parameters[key].annotation + if expected_type is not inspect.Parameter.empty: + config[key] = cls._decode_arg(expected_type, value) + + # Populate model_kwargs from config + for param in cls._hub_mixin_init_parameters.values(): + if param.name not in model_kwargs and param.name in config: + model_kwargs[param.name] = config[param.name] + + # Check if `config` argument was passed at init + if "config" in cls._hub_mixin_init_parameters and "config" not in model_kwargs: + # Decode `config` argument if it was passed + config_annotation = cls._hub_mixin_init_parameters["config"].annotation + config = cls._decode_arg(config_annotation, config) + + # Forward config to model initialization + model_kwargs["config"] = config + + # Inject config if `**kwargs` are expected + if is_dataclass(cls): + for key in cls.__dataclass_fields__: + if key not in model_kwargs and key in config: + model_kwargs[key] = config[key] + elif any(param.kind == inspect.Parameter.VAR_KEYWORD for param in cls._hub_mixin_init_parameters.values()): + for key, value in config.items(): + if key not in model_kwargs: + model_kwargs[key] = value + + # Finally, also inject if `_from_pretrained` expects it + if cls._hub_mixin_inject_config and "config" not in model_kwargs: + model_kwargs["config"] = config + + instance = cls._from_pretrained( + model_id=str(model_id), + revision=revision, + cache_dir=cache_dir, + force_download=force_download, + proxies=proxies, + resume_download=resume_download, + local_files_only=local_files_only, + token=token, + **model_kwargs, + ) + + # Implicitly set the config as instance attribute if not already set by the class + # This way `config` will be available when calling `save_pretrained` or `push_to_hub`. 
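+        # For instance (illustrative sketch, with a hypothetical `MyModel` subclass):
+        #     model = MyModel.from_pretrained("user/repo")   # reads user/repo's config.json
+        #     model.push_to_hub("user/repo-copy")            # re-serializes the same config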
+ if config is not None and (getattr(instance, "_hub_mixin_config", None) in (None, {})): + instance._hub_mixin_config = config + + return instance + + @classmethod + def _from_pretrained( + cls: Type[T], + *, + model_id: str, + revision: Optional[str], + cache_dir: Optional[Union[str, Path]], + force_download: bool, + proxies: Optional[Dict], + resume_download: Optional[bool], + local_files_only: bool, + token: Optional[Union[str, bool]], + **model_kwargs, + ) -> T: + """Overwrite this method in subclass to define how to load your model from pretrained. + + Use [`hf_hub_download`] or [`snapshot_download`] to download files from the Hub before loading them. Most + args taken as input can be directly passed to those 2 methods. If needed, you can add more arguments to this + method using "model_kwargs". For example [`PyTorchModelHubMixin._from_pretrained`] takes as input a `map_location` + parameter to set on which device the model should be loaded. + + Check out our [integration guide](../guides/integrations) for more instructions. + + Args: + model_id (`str`): + ID of the model to load from the Huggingface Hub (e.g. `bigscience/bloom`). + revision (`str`, *optional*): + Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. Defaults to the + latest commit on `main` branch. + force_download (`bool`, *optional*, defaults to `False`): + Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding + the existing cache. + proxies (`Dict[str, str]`, *optional*): + A dictionary of proxy servers to use by protocol or endpoint (e.g., `{'http': 'foo.bar:3128', + 'http://hostname': 'foo.bar:4012'}`). + token (`str` or `bool`, *optional*): + The token to use as HTTP bearer authorization for remote files. By default, it will use the token + cached when running `hf auth login`. + cache_dir (`str`, `Path`, *optional*): + Path to the folder where cached files are stored. + local_files_only (`bool`, *optional*, defaults to `False`): + If `True`, avoid downloading the file and return the path to the local cached file if it exists. + model_kwargs: + Additional keyword arguments passed along to the [`~ModelHubMixin._from_pretrained`] method. + """ + raise NotImplementedError + + @validate_hf_hub_args + def push_to_hub( + self, + repo_id: str, + *, + config: Optional[Union[dict, DataclassInstance]] = None, + commit_message: str = "Push model using huggingface_hub.", + private: Optional[bool] = None, + token: Optional[str] = None, + branch: Optional[str] = None, + create_pr: Optional[bool] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + delete_patterns: Optional[Union[List[str], str]] = None, + model_card_kwargs: Optional[Dict[str, Any]] = None, + ) -> str: + """ + Upload model checkpoint to the Hub. + + Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use + `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more + details. + + Args: + repo_id (`str`): + ID of the repository to push to (example: `"username/my-model"`). + config (`dict` or `DataclassInstance`, *optional*): + Model configuration specified as a key/value dictionary or a dataclass instance. + commit_message (`str`, *optional*): + Message to commit while pushing. + private (`bool`, *optional*): + Whether the repository created should be private. 
+ If `None` (default), the repo will be public unless the organization's default is private. + token (`str`, *optional*): + The token to use as HTTP bearer authorization for remote files. By default, it will use the token + cached when running `hf auth login`. + branch (`str`, *optional*): + The git branch on which to push the model. This defaults to `"main"`. + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request from `branch` with that commit. Defaults to `False`. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are pushed. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not pushed. + delete_patterns (`List[str]` or `str`, *optional*): + If provided, remote files matching any of the patterns will be deleted from the repo. + model_card_kwargs (`Dict[str, Any]`, *optional*): + Additional arguments passed to the model card template to customize the model card. + + Returns: + The url of the commit of your model in the given repository. + """ + api = HfApi(token=token) + repo_id = api.create_repo(repo_id=repo_id, private=private, exist_ok=True).repo_id + + # Push the files to the repo in a single commit + with SoftTemporaryDirectory() as tmp: + saved_path = Path(tmp) / repo_id + self.save_pretrained(saved_path, config=config, model_card_kwargs=model_card_kwargs) + return api.upload_folder( + repo_id=repo_id, + repo_type="model", + folder_path=saved_path, + commit_message=commit_message, + revision=branch, + create_pr=create_pr, + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + delete_patterns=delete_patterns, + ) + + def generate_model_card(self, *args, **kwargs) -> ModelCard: + card = ModelCard.from_template( + card_data=self._hub_mixin_info.model_card_data, + template_str=self._hub_mixin_info.model_card_template, + repo_url=self._hub_mixin_info.repo_url, + paper_url=self._hub_mixin_info.paper_url, + docs_url=self._hub_mixin_info.docs_url, + **kwargs, + ) + return card + + +class PyTorchModelHubMixin(ModelHubMixin): + """ + Implementation of [`ModelHubMixin`] to provide model Hub upload/download capabilities to PyTorch models. The model + is set in evaluation mode by default using `model.eval()` (dropout modules are deactivated). To train the model, + you should first set it back in training mode with `model.train()`. + + See [`ModelHubMixin`] for more details on how to use the mixin. + + Example: + + ```python + >>> import torch + >>> import torch.nn as nn + >>> from huggingface_hub import PyTorchModelHubMixin + + >>> class MyModel( + ... nn.Module, + ... PyTorchModelHubMixin, + ... library_name="keras-nlp", + ... repo_url="https://github.com/keras-team/keras-nlp", + ... paper_url="https://arxiv.org/abs/2304.12244", + ... docs_url="https://keras.io/keras_nlp/", + ... # ^ optional metadata to generate model card + ... ): + ... def __init__(self, hidden_size: int = 512, vocab_size: int = 30000, output_size: int = 4): + ... super().__init__() + ... self.param = nn.Parameter(torch.rand(hidden_size, vocab_size)) + ... self.linear = nn.Linear(output_size, vocab_size) + + ... def forward(self, x): + ... 
return self.linear(x + self.param) + >>> model = MyModel(hidden_size=256) + + # Save model weights to local directory + >>> model.save_pretrained("my-awesome-model") + + # Push model weights to the Hub + >>> model.push_to_hub("my-awesome-model") + + # Download and initialize weights from the Hub + >>> model = MyModel.from_pretrained("username/my-awesome-model") + >>> model.hidden_size + 256 + ``` + """ + + def __init_subclass__(cls, *args, tags: Optional[List[str]] = None, **kwargs) -> None: + tags = tags or [] + tags.append("pytorch_model_hub_mixin") + kwargs["tags"] = tags + return super().__init_subclass__(*args, **kwargs) + + def _save_pretrained(self, save_directory: Path) -> None: + """Save weights from a Pytorch model to a local directory.""" + model_to_save = self.module if hasattr(self, "module") else self # type: ignore + save_model_as_safetensor(model_to_save, str(save_directory / constants.SAFETENSORS_SINGLE_FILE)) # type: ignore [arg-type] + + @classmethod + def _from_pretrained( + cls, + *, + model_id: str, + revision: Optional[str], + cache_dir: Optional[Union[str, Path]], + force_download: bool, + proxies: Optional[Dict], + resume_download: Optional[bool], + local_files_only: bool, + token: Union[str, bool, None], + map_location: str = "cpu", + strict: bool = False, + **model_kwargs, + ): + """Load Pytorch pretrained weights and return the loaded model.""" + model = cls(**model_kwargs) + if os.path.isdir(model_id): + print("Loading weights from local directory") + model_file = os.path.join(model_id, constants.SAFETENSORS_SINGLE_FILE) + return cls._load_as_safetensor(model, model_file, map_location, strict) + else: + try: + model_file = hf_hub_download( + repo_id=model_id, + filename=constants.SAFETENSORS_SINGLE_FILE, + revision=revision, + cache_dir=cache_dir, + force_download=force_download, + proxies=proxies, + resume_download=resume_download, + token=token, + local_files_only=local_files_only, + ) + return cls._load_as_safetensor(model, model_file, map_location, strict) + except EntryNotFoundError: + model_file = hf_hub_download( + repo_id=model_id, + filename=constants.PYTORCH_WEIGHTS_NAME, + revision=revision, + cache_dir=cache_dir, + force_download=force_download, + proxies=proxies, + resume_download=resume_download, + token=token, + local_files_only=local_files_only, + ) + return cls._load_as_pickle(model, model_file, map_location, strict) + + @classmethod + def _load_as_pickle(cls, model: T, model_file: str, map_location: str, strict: bool) -> T: + state_dict = torch.load(model_file, map_location=torch.device(map_location), weights_only=True) + model.load_state_dict(state_dict, strict=strict) # type: ignore + model.eval() # type: ignore + return model + + @classmethod + def _load_as_safetensor(cls, model: T, model_file: str, map_location: str, strict: bool) -> T: + if packaging.version.parse(safetensors.__version__) < packaging.version.parse("0.4.3"): # type: ignore [attr-defined] + load_model_as_safetensor(model, model_file, strict=strict) # type: ignore [arg-type] + if map_location != "cpu": + logger.warning( + "Loading model weights on other devices than 'cpu' is not supported natively in your version of safetensors." + " This means that the model is loaded on 'cpu' first and then copied to the device." + " This leads to a slower loading time." + " Please update safetensors to version 0.4.3 or above for improved performance." 
+ ) + model.to(map_location) # type: ignore [attr-defined] + else: + safetensors.torch.load_model(model, model_file, strict=strict, device=map_location) # type: ignore [arg-type] + return model + + +def _load_dataclass(datacls: Type[DataclassInstance], data: dict) -> DataclassInstance: + """Load a dataclass instance from a dictionary. + + Fields not expected by the dataclass are ignored. + """ + return datacls(**{k: v for k, v in data.items() if k in datacls.__dataclass_fields__}) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/inference_api.py b/venv/lib/python3.13/site-packages/huggingface_hub/inference_api.py new file mode 100644 index 0000000000000000000000000000000000000000..f895fcc61c3867838b013ecd3f6789cbc010b5b3 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/inference_api.py @@ -0,0 +1,217 @@ +import io +from typing import Any, Dict, List, Optional, Union + +from . import constants +from .hf_api import HfApi +from .utils import build_hf_headers, get_session, is_pillow_available, logging, validate_hf_hub_args +from .utils._deprecation import _deprecate_method + + +logger = logging.get_logger(__name__) + + +ALL_TASKS = [ + # NLP + "text-classification", + "token-classification", + "table-question-answering", + "question-answering", + "zero-shot-classification", + "translation", + "summarization", + "conversational", + "feature-extraction", + "text-generation", + "text2text-generation", + "fill-mask", + "sentence-similarity", + # Audio + "text-to-speech", + "automatic-speech-recognition", + "audio-to-audio", + "audio-classification", + "voice-activity-detection", + # Computer vision + "image-classification", + "object-detection", + "image-segmentation", + "text-to-image", + "image-to-image", + # Others + "tabular-classification", + "tabular-regression", +] + + +class InferenceApi: + """Client to configure requests and make calls to the HuggingFace Inference API. + + Example: + + ```python + >>> from huggingface_hub.inference_api import InferenceApi + + >>> # Mask-fill example + >>> inference = InferenceApi("bert-base-uncased") + >>> inference(inputs="The goal of life is [MASK].") + [{'sequence': 'the goal of life is life.', 'score': 0.10933292657136917, 'token': 2166, 'token_str': 'life'}] + + >>> # Question Answering example + >>> inference = InferenceApi("deepset/roberta-base-squad2") + >>> inputs = { + ... "question": "What's my name?", + ... "context": "My name is Clara and I live in Berkeley.", + ... } + >>> inference(inputs) + {'score': 0.9326569437980652, 'start': 11, 'end': 16, 'answer': 'Clara'} + + >>> # Zero-shot example + >>> inference = InferenceApi("typeform/distilbert-base-uncased-mnli") + >>> inputs = "Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!" 
+ >>> params = {"candidate_labels": ["refund", "legal", "faq"]} + >>> inference(inputs, params) + {'sequence': 'Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!', 'labels': ['refund', 'faq', 'legal'], 'scores': [0.9378499388694763, 0.04914155602455139, 0.013008488342165947]} + + >>> # Overriding configured task + >>> inference = InferenceApi("bert-base-uncased", task="feature-extraction") + + >>> # Text-to-image + >>> inference = InferenceApi("stabilityai/stable-diffusion-2-1") + >>> inference("cat") + + + >>> # Return as raw response to parse the output yourself + >>> inference = InferenceApi("mio/amadeus") + >>> response = inference("hello world", raw_response=True) + >>> response.headers + {"Content-Type": "audio/flac", ...} + >>> response.content # raw bytes from server + b'(...)' + ``` + """ + + @validate_hf_hub_args + @_deprecate_method( + version="1.0", + message=( + "`InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out" + " this guide to learn how to convert your script to use it:" + " https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client." + ), + ) + def __init__( + self, + repo_id: str, + task: Optional[str] = None, + token: Optional[str] = None, + gpu: bool = False, + ): + """Inits headers and API call information. + + Args: + repo_id (``str``): + Id of repository (e.g. `user/bert-base-uncased`). + task (``str``, `optional`, defaults ``None``): + Whether to force a task instead of using task specified in the + repository. + token (`str`, `optional`): + The API token to use as HTTP bearer authorization. This is not + the authentication token. You can find the token in + https://huggingface.co/settings/token. Alternatively, you can + find both your organizations and personal API tokens using + `HfApi().whoami(token)`. + gpu (`bool`, `optional`, defaults `False`): + Whether to use GPU instead of CPU for inference(requires Startup + plan at least). + """ + self.options = {"wait_for_model": True, "use_gpu": gpu} + self.headers = build_hf_headers(token=token) + + # Configure task + model_info = HfApi(token=token).model_info(repo_id=repo_id) + if not model_info.pipeline_tag and not task: + raise ValueError( + "Task not specified in the repository. Please add it to the model card" + " using pipeline_tag" + " (https://huggingface.co/docs#how-is-a-models-type-of-inference-api-and-widget-determined)" + ) + + if task and task != model_info.pipeline_tag: + if task not in ALL_TASKS: + raise ValueError(f"Invalid task {task}. Make sure it's valid.") + + logger.warning( + "You're using a different task than the one specified in the" + " repository. Be sure to know what you're doing :)" + ) + self.task = task + else: + assert model_info.pipeline_tag is not None, "Pipeline tag cannot be None" + self.task = model_info.pipeline_tag + + self.api_url = f"{constants.INFERENCE_ENDPOINT}/pipeline/{self.task}/{repo_id}" + + def __repr__(self): + # Do not add headers to repr to avoid leaking token. + return f"InferenceAPI(api_url='{self.api_url}', task='{self.task}', options={self.options})" + + def __call__( + self, + inputs: Optional[Union[str, Dict, List[str], List[List[str]]]] = None, + params: Optional[Dict] = None, + data: Optional[bytes] = None, + raw_response: bool = False, + ) -> Any: + """Make a call to the Inference API. + + Args: + inputs (`str` or `Dict` or `List[str]` or `List[List[str]]`, *optional*): + Inputs for the prediction. 
+ params (`Dict`, *optional*): + Additional parameters for the models. Will be sent as `parameters` in the + payload. + data (`bytes`, *optional*): + Bytes content of the request. In this case, leave `inputs` and `params` empty. + raw_response (`bool`, defaults to `False`): + If `True`, the raw `Response` object is returned. You can parse its content + as preferred. By default, the content is parsed into a more practical format + (json dictionary or PIL Image for example). + """ + # Build payload + payload: Dict[str, Any] = { + "options": self.options, + } + if inputs: + payload["inputs"] = inputs + if params: + payload["parameters"] = params + + # Make API call + response = get_session().post(self.api_url, headers=self.headers, json=payload, data=data) + + # Let the user handle the response + if raw_response: + return response + + # By default, parse the response for the user. + content_type = response.headers.get("Content-Type") or "" + if content_type.startswith("image"): + if not is_pillow_available(): + raise ImportError( + f"Task '{self.task}' returned as image but Pillow is not installed." + " Please install it (`pip install Pillow`) or pass" + " `raw_response=True` to get the raw `Response` object and parse" + " the image by yourself." + ) + + from PIL import Image + + return Image.open(io.BytesIO(response.content)) + elif content_type == "application/json": + return response.json() + else: + raise NotImplementedError( + f"{content_type} output type is not implemented yet. You can pass" + " `raw_response=True` to get the raw `Response` object and parse the" + " output by yourself." + ) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/keras_mixin.py b/venv/lib/python3.13/site-packages/huggingface_hub/keras_mixin.py new file mode 100644 index 0000000000000000000000000000000000000000..c284947c1d3c25da421b90e902683054830788d3 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/keras_mixin.py @@ -0,0 +1,497 @@ +import collections.abc as collections +import json +import os +import warnings +from functools import wraps +from pathlib import Path +from shutil import copytree +from typing import Any, Dict, List, Optional, Union + +from huggingface_hub import ModelHubMixin, snapshot_download +from huggingface_hub.utils import ( + get_tf_version, + is_graphviz_available, + is_pydot_available, + is_tf_available, + yaml_dump, +) + +from . import constants +from .hf_api import HfApi +from .utils import SoftTemporaryDirectory, logging, validate_hf_hub_args +from .utils._typing import CallableT + + +logger = logging.get_logger(__name__) + +keras = None +if is_tf_available(): + # Depending on which version of TensorFlow is installed, we need to import + # keras from the correct location. + # See https://github.com/tensorflow/tensorflow/releases/tag/v2.16.1. + # Note: saving a keras model only works with Keras<3.0. + try: + import tf_keras as keras # type: ignore + except ImportError: + import tensorflow as tf # type: ignore + + keras = tf.keras + + +def _requires_keras_2_model(fn: CallableT) -> CallableT: + # Wrapper to raise if user tries to save a Keras 3.x model + @wraps(fn) + def _inner(model, *args, **kwargs): + if not hasattr(model, "history"): # hacky way to check if model is Keras 2.x + raise NotImplementedError( + f"Cannot use '{fn.__name__}': Keras 3.x is not supported." + " Please save models manually and upload them using `upload_folder` or `hf upload`." 
+ ) + return fn(model, *args, **kwargs) + + return _inner # type: ignore [return-value] + + +def _flatten_dict(dictionary, parent_key=""): + """Flatten a nested dictionary. + Reference: https://stackoverflow.com/a/6027615/10319735 + + Args: + dictionary (`dict`): + The nested dictionary to be flattened. + parent_key (`str`): + The parent key to be prefixed to the children keys. + Necessary for recursing over the nested dictionary. + + Returns: + The flattened dictionary. + """ + items = [] + for key, value in dictionary.items(): + new_key = f"{parent_key}.{key}" if parent_key else key + if isinstance(value, collections.MutableMapping): + items.extend( + _flatten_dict( + value, + new_key, + ).items() + ) + else: + items.append((new_key, value)) + return dict(items) + + +def _create_hyperparameter_table(model): + """Parse hyperparameter dictionary into a markdown table.""" + table = None + if model.optimizer is not None: + optimizer_params = model.optimizer.get_config() + # flatten the configuration + optimizer_params = _flatten_dict(optimizer_params) + optimizer_params["training_precision"] = keras.mixed_precision.global_policy().name + table = "| Hyperparameters | Value |\n| :-- | :-- |\n" + for key, value in optimizer_params.items(): + table += f"| {key} | {value} |\n" + return table + + +def _plot_network(model, save_directory): + keras.utils.plot_model( + model, + to_file=f"{save_directory}/model.png", + show_shapes=False, + show_dtype=False, + show_layer_names=True, + rankdir="TB", + expand_nested=False, + dpi=96, + layer_range=None, + ) + + +def _create_model_card( + model, + repo_dir: Path, + plot_model: bool = True, + metadata: Optional[dict] = None, +): + """ + Creates a model card for the repository. + + Do not overwrite an existing README.md file. + """ + readme_path = repo_dir / "README.md" + if readme_path.exists(): + return + + hyperparameters = _create_hyperparameter_table(model) + if plot_model and is_graphviz_available() and is_pydot_available(): + _plot_network(model, repo_dir) + if metadata is None: + metadata = {} + metadata["library_name"] = "keras" + model_card: str = "---\n" + model_card += yaml_dump(metadata, default_flow_style=False) + model_card += "---\n" + model_card += "\n## Model description\n\nMore information needed\n" + model_card += "\n## Intended uses & limitations\n\nMore information needed\n" + model_card += "\n## Training and evaluation data\n\nMore information needed\n" + if hyperparameters is not None: + model_card += "\n## Training procedure\n" + model_card += "\n### Training hyperparameters\n" + model_card += "\nThe following hyperparameters were used during training:\n\n" + model_card += hyperparameters + model_card += "\n" + if plot_model and os.path.exists(f"{repo_dir}/model.png"): + model_card += "\n ## Model Plot\n" + model_card += "\n
" + model_card += "\nView Model Plot\n" + path_to_plot = "./model.png" + model_card += f"\n![Model Image]({path_to_plot})\n" + model_card += "\n
" + + readme_path.write_text(model_card) + + +@_requires_keras_2_model +def save_pretrained_keras( + model, + save_directory: Union[str, Path], + config: Optional[Dict[str, Any]] = None, + include_optimizer: bool = False, + plot_model: bool = True, + tags: Optional[Union[list, str]] = None, + **model_save_kwargs, +): + """ + Saves a Keras model to save_directory in SavedModel format. Use this if + you're using the Functional or Sequential APIs. + + Args: + model (`Keras.Model`): + The [Keras + model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) + you'd like to save. The model must be compiled and built. + save_directory (`str` or `Path`): + Specify directory in which you want to save the Keras model. + config (`dict`, *optional*): + Configuration object to be saved alongside the model weights. + include_optimizer(`bool`, *optional*, defaults to `False`): + Whether or not to include optimizer in serialization. + plot_model (`bool`, *optional*, defaults to `True`): + Setting this to `True` will plot the model and put it in the model + card. Requires graphviz and pydot to be installed. + tags (Union[`str`,`list`], *optional*): + List of tags that are related to model or string of a single tag. See example tags + [here](https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1). + model_save_kwargs(`dict`, *optional*): + model_save_kwargs will be passed to + [`tf.keras.models.save_model()`](https://www.tensorflow.org/api_docs/python/tf/keras/models/save_model). + """ + if keras is None: + raise ImportError("Called a Tensorflow-specific function but could not import it.") + + if not model.built: + raise ValueError("Model should be built before trying to save") + + save_directory = Path(save_directory) + save_directory.mkdir(parents=True, exist_ok=True) + + # saving config + if config: + if not isinstance(config, dict): + raise RuntimeError(f"Provided config to save_pretrained_keras should be a dict. Got: '{type(config)}'") + + with (save_directory / constants.CONFIG_NAME).open("w") as f: + json.dump(config, f) + + metadata = {} + if isinstance(tags, list): + metadata["tags"] = tags + elif isinstance(tags, str): + metadata["tags"] = [tags] + + task_name = model_save_kwargs.pop("task_name", None) + if task_name is not None: + warnings.warn( + "`task_name` input argument is deprecated. Pass `tags` instead.", + FutureWarning, + ) + if "tags" in metadata: + metadata["tags"].append(task_name) + else: + metadata["tags"] = [task_name] + + if model.history is not None: + if model.history.history != {}: + path = save_directory / "history.json" + if path.exists(): + warnings.warn( + "`history.json` file already exists, it will be overwritten by the history of this version.", + UserWarning, + ) + with path.open("w", encoding="utf-8") as f: + json.dump(model.history.history, f, indent=2, sort_keys=True) + + _create_model_card(model, save_directory, plot_model, metadata) + keras.models.save_model(model, save_directory, include_optimizer=include_optimizer, **model_save_kwargs) + + +def from_pretrained_keras(*args, **kwargs) -> "KerasModelHubMixin": + r""" + Instantiate a pretrained Keras model from a pre-trained model from the Hub. + The model is expected to be in `SavedModel` format. + + Args: + pretrained_model_name_or_path (`str` or `os.PathLike`): + Can be either: + - A string, the `model id` of a pretrained model hosted inside a + model repo on huggingface.co. 
Valid model ids can be located + at the root-level, like `bert-base-uncased`, or namespaced + under a user or organization name, like + `dbmdz/bert-base-german-cased`. + - You can add `revision` by appending `@` at the end of model_id + simply like this: `dbmdz/bert-base-german-cased@main` Revision + is the specific model version to use. It can be a branch name, + a tag name, or a commit id, since we use a git-based system + for storing models and other artifacts on huggingface.co, so + `revision` can be any identifier allowed by git. + - A path to a `directory` containing model weights saved using + [`~transformers.PreTrainedModel.save_pretrained`], e.g., + `./my_model_directory/`. + - `None` if you are both providing the configuration and state + dictionary (resp. with keyword arguments `config` and + `state_dict`). + force_download (`bool`, *optional*, defaults to `False`): + Whether to force the (re-)download of the model weights and + configuration files, overriding the cached versions if they exist. + proxies (`Dict[str, str]`, *optional*): + A dictionary of proxy servers to use by protocol or endpoint, e.g., + `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The + proxies are used on each request. + token (`str` or `bool`, *optional*): + The token to use as HTTP bearer authorization for remote files. If + `True`, will use the token generated when running `transformers-cli + login` (stored in `~/.huggingface`). + cache_dir (`Union[str, os.PathLike]`, *optional*): + Path to a directory in which a downloaded pretrained model + configuration should be cached if the standard cache should not be + used. + local_files_only(`bool`, *optional*, defaults to `False`): + Whether to only look at local files (i.e., do not try to download + the model). + model_kwargs (`Dict`, *optional*): + model_kwargs will be passed to the model during initialization + + > [!TIP] + > Passing `token=True` is required when you want to use a private + > model. + """ + return KerasModelHubMixin.from_pretrained(*args, **kwargs) + + +@validate_hf_hub_args +@_requires_keras_2_model +def push_to_hub_keras( + model, + repo_id: str, + *, + config: Optional[dict] = None, + commit_message: str = "Push Keras model using huggingface_hub.", + private: Optional[bool] = None, + api_endpoint: Optional[str] = None, + token: Optional[str] = None, + branch: Optional[str] = None, + create_pr: Optional[bool] = None, + allow_patterns: Optional[Union[List[str], str]] = None, + ignore_patterns: Optional[Union[List[str], str]] = None, + delete_patterns: Optional[Union[List[str], str]] = None, + log_dir: Optional[str] = None, + include_optimizer: bool = False, + tags: Optional[Union[list, str]] = None, + plot_model: bool = True, + **model_save_kwargs, +): + """ + Upload model checkpoint to the Hub. + + Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use + `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more + details. + + Args: + model (`Keras.Model`): + The [Keras model](`https://www.tensorflow.org/api_docs/python/tf/keras/Model`) you'd like to push to the + Hub. The model must be compiled and built. + repo_id (`str`): + ID of the repository to push to (example: `"username/my-model"`). + commit_message (`str`, *optional*, defaults to "Add Keras model"): + Message to commit while pushing. + private (`bool`, *optional*): + Whether the repository created should be private. 
+ If `None` (default), the repo will be public unless the organization's default is private. + api_endpoint (`str`, *optional*): + The API endpoint to use when pushing the model to the hub. + token (`str`, *optional*): + The token to use as HTTP bearer authorization for remote files. If + not set, will use the token set when logging in with + `hf auth login` (stored in `~/.huggingface`). + branch (`str`, *optional*): + The git branch on which to push the model. This defaults to + the default branch as specified in your repository, which + defaults to `"main"`. + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request from `branch` with that commit. + Defaults to `False`. + config (`dict`, *optional*): + Configuration object to be saved alongside the model weights. + allow_patterns (`List[str]` or `str`, *optional*): + If provided, only files matching at least one pattern are pushed. + ignore_patterns (`List[str]` or `str`, *optional*): + If provided, files matching any of the patterns are not pushed. + delete_patterns (`List[str]` or `str`, *optional*): + If provided, remote files matching any of the patterns will be deleted from the repo. + log_dir (`str`, *optional*): + TensorBoard logging directory to be pushed. The Hub automatically + hosts and displays a TensorBoard instance if log files are included + in the repository. + include_optimizer (`bool`, *optional*, defaults to `False`): + Whether or not to include optimizer during serialization. + tags (Union[`list`, `str`], *optional*): + List of tags that are related to model or string of a single tag. See example tags + [here](https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1). + plot_model (`bool`, *optional*, defaults to `True`): + Setting this to `True` will plot the model and put it in the model + card. Requires graphviz and pydot to be installed. + model_save_kwargs(`dict`, *optional*): + model_save_kwargs will be passed to + [`tf.keras.models.save_model()`](https://www.tensorflow.org/api_docs/python/tf/keras/models/save_model). + + Returns: + The url of the commit of your model in the given repository. + """ + api = HfApi(endpoint=api_endpoint) + repo_id = api.create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True).repo_id + + # Push the files to the repo in a single commit + with SoftTemporaryDirectory() as tmp: + saved_path = Path(tmp) / repo_id + save_pretrained_keras( + model, + saved_path, + config=config, + include_optimizer=include_optimizer, + tags=tags, + plot_model=plot_model, + **model_save_kwargs, + ) + + # If `log_dir` provided, delete remote logs and upload new ones + if log_dir is not None: + delete_patterns = ( + [] + if delete_patterns is None + else ( + [delete_patterns] # convert `delete_patterns` to a list + if isinstance(delete_patterns, str) + else delete_patterns + ) + ) + delete_patterns.append("logs/*") + copytree(log_dir, saved_path / "logs") + + return api.upload_folder( + repo_type="model", + repo_id=repo_id, + folder_path=saved_path, + commit_message=commit_message, + token=token, + revision=branch, + create_pr=create_pr, + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + delete_patterns=delete_patterns, + ) + + +class KerasModelHubMixin(ModelHubMixin): + """ + Implementation of [`ModelHubMixin`] to provide model Hub upload/download + capabilities to Keras models. 
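+ 
+ The same save/load/push functionality is exposed through the module-level
+ helpers above; a minimal functional-API sketch (assuming a built, compiled
+ `tf.keras` model bound to the name `model`):
+ 
+ ```python
+ >>> from huggingface_hub import save_pretrained_keras, from_pretrained_keras
+ 
+ >>> # serialize to a local directory in SavedModel format, then reload it
+ >>> save_pretrained_keras(model, "./my-keras-model", tags=["demo"])
+ >>> reloaded = from_pretrained_keras("./my-keras-model")
+ ```
+ 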
+ + + ```python + >>> import tensorflow as tf + >>> from huggingface_hub import KerasModelHubMixin + + + >>> class MyModel(tf.keras.Model, KerasModelHubMixin): + ... def __init__(self, **kwargs): + ... super().__init__() + ... self.config = kwargs.pop("config", None) + ... self.dummy_inputs = ... + ... self.layer = ... + + ... def call(self, *args): + ... return ... + + + >>> # Initialize and compile the model as you normally would + >>> model = MyModel() + >>> model.compile(...) + >>> # Build the graph by training it or passing dummy inputs + >>> _ = model(model.dummy_inputs) + >>> # Save model weights to local directory + >>> model.save_pretrained("my-awesome-model") + >>> # Push model weights to the Hub + >>> model.push_to_hub("my-awesome-model") + >>> # Download and initialize weights from the Hub + >>> model = MyModel.from_pretrained("username/super-cool-model") + ``` + """ + + def _save_pretrained(self, save_directory): + save_pretrained_keras(self, save_directory) + + @classmethod + def _from_pretrained( + cls, + model_id, + revision, + cache_dir, + force_download, + proxies, + resume_download, + local_files_only, + token, + config: Optional[Dict[str, Any]] = None, + **model_kwargs, + ): + """Here we just call [`from_pretrained_keras`] function so both the mixin and + functional APIs stay in sync. + + TODO - Some args above aren't used since we are calling + snapshot_download instead of hf_hub_download. + """ + if keras is None: + raise ImportError("Called a TensorFlow-specific function but could not import it.") + + # Root is either a local filepath matching model_id or a cached snapshot + if not os.path.isdir(model_id): + storage_folder = snapshot_download( + repo_id=model_id, + revision=revision, + cache_dir=cache_dir, + library_name="keras", + library_version=get_tf_version(), + ) + else: + storage_folder = model_id + + # TODO: change this in a future PR. We are not returning a KerasModelHubMixin instance here... + model = keras.models.load_model(storage_folder) + + # For now, we add a new attribute, config, to store the config loaded from the hub/a local dir. + model.config = config + + return model diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/lfs.py b/venv/lib/python3.13/site-packages/huggingface_hub/lfs.py new file mode 100644 index 0000000000000000000000000000000000000000..40b6ad087ca6bd33874433439a2c4f5b23d100c5 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/lfs.py @@ -0,0 +1,466 @@ +# coding=utf-8 +# Copyright 2019-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Git LFS related type definitions and utilities""" + +import inspect +import io +import re +import warnings +from dataclasses import dataclass +from math import ceil +from os.path import getsize +from pathlib import Path +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional, Tuple, TypedDict +from urllib.parse import unquote + +from huggingface_hub import constants + +from .utils import ( + build_hf_headers, + fix_hf_endpoint_in_url, + get_session, + hf_raise_for_status, + http_backoff, + logging, + tqdm, + validate_hf_hub_args, +) +from .utils._lfs import SliceFileObj +from .utils.sha import sha256, sha_fileobj +from .utils.tqdm import is_tqdm_disabled + + +if TYPE_CHECKING: + from ._commit_api import CommitOperationAdd + +logger = logging.get_logger(__name__) + +OID_REGEX = re.compile(r"^[0-9a-f]{40}$") + +LFS_MULTIPART_UPLOAD_COMMAND = "lfs-multipart-upload" + +LFS_HEADERS = { + "Accept": "application/vnd.git-lfs+json", + "Content-Type": "application/vnd.git-lfs+json", +} + + +@dataclass +class UploadInfo: + """ + Dataclass holding required information to determine whether a blob + should be uploaded to the hub using the LFS protocol or the regular protocol + + Args: + sha256 (`bytes`): + SHA256 hash of the blob + size (`int`): + Size in bytes of the blob + sample (`bytes`): + First 512 bytes of the blob + """ + + sha256: bytes + size: int + sample: bytes + + @classmethod + def from_path(cls, path: str): + size = getsize(path) + with io.open(path, "rb") as file: + sample = file.peek(512)[:512] + sha = sha_fileobj(file) + return cls(size=size, sha256=sha, sample=sample) + + @classmethod + def from_bytes(cls, data: bytes): + sha = sha256(data).digest() + return cls(size=len(data), sample=data[:512], sha256=sha) + + @classmethod + def from_fileobj(cls, fileobj: BinaryIO): + sample = fileobj.read(512) + fileobj.seek(0, io.SEEK_SET) + sha = sha_fileobj(fileobj) + size = fileobj.tell() + fileobj.seek(0, io.SEEK_SET) + return cls(size=size, sha256=sha, sample=sample) + + +@validate_hf_hub_args +def post_lfs_batch_info( + upload_infos: Iterable[UploadInfo], + token: Optional[str], + repo_type: str, + repo_id: str, + revision: Optional[str] = None, + endpoint: Optional[str] = None, + headers: Optional[Dict[str, str]] = None, + transfers: Optional[List[str]] = None, +) -> Tuple[List[dict], List[dict], Optional[str]]: + """ + Requests the LFS batch endpoint to retrieve upload instructions + + Learn more: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md + + Args: + upload_infos (`Iterable` of `UploadInfo`): + `UploadInfo` for the files that are being uploaded, typically obtained + from `CommitOperationAdd.upload_info` + repo_type (`str`): + Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`. + repo_id (`str`): + A namespace (user or an organization) and a repo name separated + by a `/`. + revision (`str`, *optional*): + The git revision to upload to. + headers (`dict`, *optional*): + Additional headers to include in the request + transfers (`list`, *optional*): + List of transfer methods to use. Defaults to ["basic", "multipart"]. + + Returns: + `LfsBatchInfo`: 3-tuple: + - First element is the list of upload instructions from the server + - Second element is a list of errors, if any + - Third element is the chosen transfer adapter if provided by the server (e.g. "basic", "multipart", "xet") + + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If an argument is invalid or the server response is malformed. 
+ [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + If the server returned an error. + """ + endpoint = endpoint if endpoint is not None else constants.ENDPOINT + url_prefix = "" + if repo_type in constants.REPO_TYPES_URL_PREFIXES: + url_prefix = constants.REPO_TYPES_URL_PREFIXES[repo_type] + batch_url = f"{endpoint}/{url_prefix}{repo_id}.git/info/lfs/objects/batch" + payload: Dict = { + "operation": "upload", + "transfers": transfers if transfers is not None else ["basic", "multipart"], + "objects": [ + { + "oid": upload.sha256.hex(), + "size": upload.size, + } + for upload in upload_infos + ], + "hash_algo": "sha256", + } + if revision is not None: + payload["ref"] = {"name": unquote(revision)} # revision has been previously 'quoted' + + headers = { + **LFS_HEADERS, + **build_hf_headers(token=token), + **(headers or {}), + } + resp = get_session().post(batch_url, headers=headers, json=payload) + hf_raise_for_status(resp) + batch_info = resp.json() + + objects = batch_info.get("objects", None) + if not isinstance(objects, list): + raise ValueError("Malformed response from server") + + chosen_transfer = batch_info.get("transfer") + chosen_transfer = chosen_transfer if isinstance(chosen_transfer, str) else None + + return ( + [_validate_batch_actions(obj) for obj in objects if "error" not in obj], + [_validate_batch_error(obj) for obj in objects if "error" in obj], + chosen_transfer, + ) + + +class PayloadPartT(TypedDict): + partNumber: int + etag: str + + +class CompletionPayloadT(TypedDict): + """Payload that will be sent to the Hub when uploading multi-part.""" + + oid: str + parts: List[PayloadPartT] + + +def lfs_upload( + operation: "CommitOperationAdd", + lfs_batch_action: Dict, + token: Optional[str] = None, + headers: Optional[Dict[str, str]] = None, + endpoint: Optional[str] = None, +) -> None: + """ + Handles uploading a given object to the Hub with the LFS protocol. + + Can be a No-op if the content of the file is already present on the hub large file storage. + + Args: + operation (`CommitOperationAdd`): + The add operation triggering this upload. + lfs_batch_action (`dict`): + Upload instructions from the LFS batch endpoint for this object. See [`~utils.lfs.post_lfs_batch_info`] for + more details. + headers (`dict`, *optional*): + Headers to include in the request, including authentication and user agent headers. + + Raises: + [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + If `lfs_batch_action` is improperly formatted + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + If the upload resulted in an error + """ + # 0. If LFS file is already present, skip upload + _validate_batch_actions(lfs_batch_action) + actions = lfs_batch_action.get("actions") + if actions is None: + # The file was already uploaded + logger.debug(f"Content of file {operation.path_in_repo} is already present upstream - skipping upload") + return + + # 1. Validate server response (check required keys in dict) + upload_action = lfs_batch_action["actions"]["upload"] + _validate_lfs_action(upload_action) + verify_action = lfs_batch_action["actions"].get("verify") + if verify_action is not None: + _validate_lfs_action(verify_action) + + # 2. 
Upload file (either single part or multi-part) + header = upload_action.get("header", {}) + chunk_size = header.get("chunk_size") + upload_url = fix_hf_endpoint_in_url(upload_action["href"], endpoint=endpoint) + if chunk_size is not None: + try: + chunk_size = int(chunk_size) + except (ValueError, TypeError): + raise ValueError( + f"Malformed response from LFS batch endpoint: `chunk_size` should be an integer. Got '{chunk_size}'." + ) + _upload_multi_part(operation=operation, header=header, chunk_size=chunk_size, upload_url=upload_url) + else: + _upload_single_part(operation=operation, upload_url=upload_url) + + # 3. Verify upload went well + if verify_action is not None: + _validate_lfs_action(verify_action) + verify_url = fix_hf_endpoint_in_url(verify_action["href"], endpoint) + verify_resp = get_session().post( + verify_url, + headers=build_hf_headers(token=token, headers=headers), + json={"oid": operation.upload_info.sha256.hex(), "size": operation.upload_info.size}, + ) + hf_raise_for_status(verify_resp) + logger.debug(f"{operation.path_in_repo}: Upload successful") + + +def _validate_lfs_action(lfs_action: dict): + """validates response from the LFS batch endpoint""" + if not ( + isinstance(lfs_action.get("href"), str) + and (lfs_action.get("header") is None or isinstance(lfs_action.get("header"), dict)) + ): + raise ValueError("lfs_action is improperly formatted") + return lfs_action + + +def _validate_batch_actions(lfs_batch_actions: dict): + """validates response from the LFS batch endpoint""" + if not (isinstance(lfs_batch_actions.get("oid"), str) and isinstance(lfs_batch_actions.get("size"), int)): + raise ValueError("lfs_batch_actions is improperly formatted") + + upload_action = lfs_batch_actions.get("actions", {}).get("upload") + verify_action = lfs_batch_actions.get("actions", {}).get("verify") + if upload_action is not None: + _validate_lfs_action(upload_action) + if verify_action is not None: + _validate_lfs_action(verify_action) + return lfs_batch_actions + + +def _validate_batch_error(lfs_batch_error: dict): + """validates response from the LFS batch endpoint""" + if not (isinstance(lfs_batch_error.get("oid"), str) and isinstance(lfs_batch_error.get("size"), int)): + raise ValueError("lfs_batch_error is improperly formatted") + error_info = lfs_batch_error.get("error") + if not ( + isinstance(error_info, dict) + and isinstance(error_info.get("message"), str) + and isinstance(error_info.get("code"), int) + ): + raise ValueError("lfs_batch_error is improperly formatted") + return lfs_batch_error + + +def _upload_single_part(operation: "CommitOperationAdd", upload_url: str) -> None: + """ + Uploads `fileobj` as a single PUT HTTP request (basic LFS transfer protocol) + + Args: + upload_url (`str`): + The URL to PUT the file to. + fileobj: + The file-like object holding the data to upload. + + Returns: `requests.Response` + + Raises: + [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + If the upload resulted in an error. + """ + with operation.as_file(with_tqdm=True) as fileobj: + # S3 might raise a transient 500 error -> let's retry if that happens + response = http_backoff("PUT", upload_url, data=fileobj) + hf_raise_for_status(response) + + +def _upload_multi_part(operation: "CommitOperationAdd", header: Dict, chunk_size: int, upload_url: str) -> None: + """ + Uploads file using HF multipart LFS transfer protocol. + """ + # 1. 
Get upload URLs for each part + sorted_parts_urls = _get_sorted_parts_urls(header=header, upload_info=operation.upload_info, chunk_size=chunk_size) + + # 2. Upload parts (either with hf_transfer or in pure Python) + use_hf_transfer = constants.HF_HUB_ENABLE_HF_TRANSFER + if ( + constants.HF_HUB_ENABLE_HF_TRANSFER + and not isinstance(operation.path_or_fileobj, str) + and not isinstance(operation.path_or_fileobj, Path) + ): + warnings.warn( + "hf_transfer is enabled but does not support uploading from bytes or BinaryIO, falling back to regular" + " upload" + ) + use_hf_transfer = False + + response_headers = ( + _upload_parts_hf_transfer(operation=operation, sorted_parts_urls=sorted_parts_urls, chunk_size=chunk_size) + if use_hf_transfer + else _upload_parts_iteratively(operation=operation, sorted_parts_urls=sorted_parts_urls, chunk_size=chunk_size) + ) + + # 3. Send completion request + completion_res = get_session().post( + upload_url, + json=_get_completion_payload(response_headers, operation.upload_info.sha256.hex()), + headers=LFS_HEADERS, + ) + hf_raise_for_status(completion_res) + + +def _get_sorted_parts_urls(header: Dict, upload_info: UploadInfo, chunk_size: int) -> List[str]: + sorted_part_upload_urls = [ + upload_url + for _, upload_url in sorted( + [ + (int(part_num, 10), upload_url) + for part_num, upload_url in header.items() + if part_num.isdigit() and len(part_num) > 0 + ], + key=lambda t: t[0], + ) + ] + num_parts = len(sorted_part_upload_urls) + if num_parts != ceil(upload_info.size / chunk_size): + raise ValueError("Invalid server response to upload large LFS file") + return sorted_part_upload_urls + + +def _get_completion_payload(response_headers: List[Dict], oid: str) -> CompletionPayloadT: + parts: List[PayloadPartT] = [] + for part_number, header in enumerate(response_headers): + etag = header.get("etag") + if etag is None or etag == "": + raise ValueError(f"Invalid etag (`{etag}`) returned for part {part_number + 1}") + parts.append( + { + "partNumber": part_number + 1, + "etag": etag, + } + ) + return {"oid": oid, "parts": parts} + + +def _upload_parts_iteratively( + operation: "CommitOperationAdd", sorted_parts_urls: List[str], chunk_size: int +) -> List[Dict]: + headers = [] + with operation.as_file(with_tqdm=True) as fileobj: + for part_idx, part_upload_url in enumerate(sorted_parts_urls): + with SliceFileObj( + fileobj, + seek_from=chunk_size * part_idx, + read_limit=chunk_size, + ) as fileobj_slice: + # S3 might raise a transient 500 error -> let's retry if that happens + part_upload_res = http_backoff("PUT", part_upload_url, data=fileobj_slice) + hf_raise_for_status(part_upload_res) + headers.append(part_upload_res.headers) + return headers # type: ignore + + +def _upload_parts_hf_transfer( + operation: "CommitOperationAdd", sorted_parts_urls: List[str], chunk_size: int +) -> List[Dict]: + # Upload file using an external Rust-based package. Upload is faster but support less features (no progress bars). + try: + from hf_transfer import multipart_upload + except ImportError: + raise ValueError( + "Fast uploading using 'hf_transfer' is enabled (HF_HUB_ENABLE_HF_TRANSFER=1) but 'hf_transfer' package is" + " not available in your environment. Try `pip install hf_transfer`." + ) + + supports_callback = "callback" in inspect.signature(multipart_upload).parameters + if not supports_callback: + warnings.warn( + "You are using an outdated version of `hf_transfer`. Consider upgrading to latest version to enable progress bars using `pip install -U hf_transfer`." 
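+ # (on the signature probe above: older `hf_transfer` releases expose
+ # multipart_upload() without a `callback` kwarg, so when it is missing the
+ # progress bar below is simply bumped to `total` once the upload finishes)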
+ ) + + total = operation.upload_info.size + desc = operation.path_in_repo + if len(desc) > 40: + desc = f"(…){desc[-40:]}" + + with tqdm( + unit="B", + unit_scale=True, + total=total, + initial=0, + desc=desc, + disable=is_tqdm_disabled(logger.getEffectiveLevel()), + name="huggingface_hub.lfs_upload", + ) as progress: + try: + output = multipart_upload( + file_path=operation.path_or_fileobj, + parts_urls=sorted_parts_urls, + chunk_size=chunk_size, + max_files=128, + parallel_failures=127, # could be removed + max_retries=5, + **({"callback": progress.update} if supports_callback else {}), + ) + except Exception as e: + raise RuntimeError( + "An error occurred while uploading using `hf_transfer`. Consider disabling HF_HUB_ENABLE_HF_TRANSFER for" + " better error handling." + ) from e + if not supports_callback: + progress.update(total) + return output diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/py.typed b/venv/lib/python3.13/site-packages/huggingface_hub/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/repocard.py b/venv/lib/python3.13/site-packages/huggingface_hub/repocard.py new file mode 100644 index 0000000000000000000000000000000000000000..357935c3f1831df2afc86a30f82f10fa8039a225 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/repocard.py @@ -0,0 +1,827 @@ +import os +import re +from pathlib import Path +from typing import Any, Dict, Literal, Optional, Type, Union + +import requests +import yaml + +from huggingface_hub.file_download import hf_hub_download +from huggingface_hub.hf_api import upload_file +from huggingface_hub.repocard_data import ( + CardData, + DatasetCardData, + EvalResult, + ModelCardData, + SpaceCardData, + eval_results_to_model_index, + model_index_to_eval_results, +) +from huggingface_hub.utils import get_session, is_jinja_available, yaml_dump + +from . import constants +from .errors import EntryNotFoundError +from .utils import SoftTemporaryDirectory, logging, validate_hf_hub_args + + +logger = logging.get_logger(__name__) + + +TEMPLATE_MODELCARD_PATH = Path(__file__).parent / "templates" / "modelcard_template.md" +TEMPLATE_DATASETCARD_PATH = Path(__file__).parent / "templates" / "datasetcard_template.md" + +# exact same regex as in the Hub server. Please keep in sync. +# See https://github.com/huggingface/moon-landing/blob/main/server/lib/ViewMarkdown.ts#L18 +REGEX_YAML_BLOCK = re.compile(r"^(\s*---[\r\n]+)([\S\s]*?)([\r\n]+---(\r\n|\n|$))") + + +class RepoCard: + card_data_class = CardData + default_template_path = TEMPLATE_MODELCARD_PATH + repo_type = "model" + + def __init__(self, content: str, ignore_metadata_errors: bool = False): + """Initialize a RepoCard from string content. The content should be a + Markdown file with a YAML block at the beginning and a Markdown body. + + Args: + content (`str`): The content of the Markdown file. + + Example: + ```python + >>> from huggingface_hub.repocard import RepoCard + >>> text = ''' + ... --- + ... language: en + ... license: mit + ... --- + ... + ... # My repo + ... ''' + >>> card = RepoCard(text) + >>> card.data.to_dict() + {'language': 'en', 'license': 'mit'} + >>> card.text + '\\n# My repo\\n' + + ``` + > [!TIP] + > Raises the following error: + > + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > when the content of the repo card metadata is not a dictionary. 
+ """ + + # Set the content of the RepoCard, as well as underlying .data and .text attributes. + # See the `content` property setter for more details. + self.ignore_metadata_errors = ignore_metadata_errors + self.content = content + + @property + def content(self): + """The content of the RepoCard, including the YAML block and the Markdown body.""" + line_break = _detect_line_ending(self._content) or "\n" + return f"---{line_break}{self.data.to_yaml(line_break=line_break, original_order=self._original_order)}{line_break}---{line_break}{self.text}" + + @content.setter + def content(self, content: str): + """Set the content of the RepoCard.""" + self._content = content + + match = REGEX_YAML_BLOCK.search(content) + if match: + # Metadata found in the YAML block + yaml_block = match.group(2) + self.text = content[match.end() :] + data_dict = yaml.safe_load(yaml_block) + + if data_dict is None: + data_dict = {} + + # The YAML block's data should be a dictionary + if not isinstance(data_dict, dict): + raise ValueError("repo card metadata block should be a dict") + else: + # Model card without metadata... create empty metadata + logger.warning("Repo card metadata block was not found. Setting CardData to empty.") + data_dict = {} + self.text = content + + self.data = self.card_data_class(**data_dict, ignore_metadata_errors=self.ignore_metadata_errors) + self._original_order = list(data_dict.keys()) + + def __str__(self): + return self.content + + def save(self, filepath: Union[Path, str]): + r"""Save a RepoCard to a file. + + Args: + filepath (`Union[Path, str]`): Filepath to the markdown file to save. + + Example: + ```python + >>> from huggingface_hub.repocard import RepoCard + >>> card = RepoCard("---\nlanguage: en\n---\n# This is a test repo card") + >>> card.save("/tmp/test.md") + + ``` + """ + filepath = Path(filepath) + filepath.parent.mkdir(parents=True, exist_ok=True) + # Preserve newlines as in the existing file. + with open(filepath, mode="w", newline="", encoding="utf-8") as f: + f.write(str(self)) + + @classmethod + def load( + cls, + repo_id_or_path: Union[str, Path], + repo_type: Optional[str] = None, + token: Optional[str] = None, + ignore_metadata_errors: bool = False, + ): + """Initialize a RepoCard from a Hugging Face Hub repo's README.md or a local filepath. + + Args: + repo_id_or_path (`Union[str, Path]`): + The repo ID associated with a Hugging Face Hub repo or a local filepath. + repo_type (`str`, *optional*): + The type of Hugging Face repo to push to. Defaults to None, which will use use "model". Other options + are "dataset" and "space". Not used when loading from a local filepath. If this is called from a child + class, the default value will be the child class's `repo_type`. + token (`str`, *optional*): + Authentication token, obtained with `huggingface_hub.HfApi.login` method. Will default to the stored token. + ignore_metadata_errors (`str`): + If True, errors while parsing the metadata section will be ignored. Some information might be lost during + the process. Use it at your own risk. + + Returns: + [`huggingface_hub.repocard.RepoCard`]: The RepoCard (or subclass) initialized from the repo's + README.md file or filepath. 
+ + Example: + ```python + >>> from huggingface_hub.repocard import RepoCard + >>> card = RepoCard.load("nateraw/food") + >>> assert card.data.tags == ["generated_from_trainer", "image-classification", "pytorch"] + + ``` + """ + + if Path(repo_id_or_path).is_file(): + card_path = Path(repo_id_or_path) + elif isinstance(repo_id_or_path, str): + card_path = Path( + hf_hub_download( + repo_id_or_path, + constants.REPOCARD_NAME, + repo_type=repo_type or cls.repo_type, + token=token, + ) + ) + else: + raise ValueError(f"Cannot load RepoCard: path not found on disk ({repo_id_or_path}).") + + # Preserve newlines in the existing file. + with card_path.open(mode="r", newline="", encoding="utf-8") as f: + return cls(f.read(), ignore_metadata_errors=ignore_metadata_errors) + + def validate(self, repo_type: Optional[str] = None): + """Validates card against Hugging Face Hub's card validation logic. + Using this function requires access to the internet, so it is only called + internally by [`huggingface_hub.repocard.RepoCard.push_to_hub`]. + + Args: + repo_type (`str`, *optional*, defaults to "model"): + The type of Hugging Face repo to push to. Options are "model", "dataset", and "space". + If this function is called from a child class, the default will be the child class's `repo_type`. + + > [!TIP] + > Raises the following errors: + > + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if the card fails validation checks. + > - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) + > if the request to the Hub API fails for any other reason. + """ + + # If repo type is provided, otherwise, use the repo type of the card. + repo_type = repo_type or self.repo_type + + body = { + "repoType": repo_type, + "content": str(self), + } + headers = {"Accept": "text/plain"} + + try: + r = get_session().post("https://huggingface.co/api/validate-yaml", body, headers=headers) + r.raise_for_status() + except requests.exceptions.HTTPError as exc: + if r.status_code == 400: + raise ValueError(r.text) + else: + raise exc + + def push_to_hub( + self, + repo_id: str, + token: Optional[str] = None, + repo_type: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + revision: Optional[str] = None, + create_pr: Optional[bool] = None, + parent_commit: Optional[str] = None, + ): + """Push a RepoCard to a Hugging Face Hub repo. + + Args: + repo_id (`str`): + The repo ID of the Hugging Face Hub repo to push to. Example: "nateraw/food". + token (`str`, *optional*): + Authentication token, obtained with `huggingface_hub.HfApi.login` method. Will default to + the stored token. + repo_type (`str`, *optional*, defaults to "model"): + The type of Hugging Face repo to push to. Options are "model", "dataset", and "space". If this + function is called by a child class, it will default to the child class's `repo_type`. + commit_message (`str`, *optional*): + The summary / title / first line of the generated commit. + commit_description (`str`, *optional*) + The description of the generated commit. + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the `"main"` branch. + create_pr (`bool`, *optional*): + Whether or not to create a Pull Request with this commit. Defaults to `False`. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. 
+ If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. + Returns: + `str`: URL of the commit which updated the card metadata. + """ + + # If repo type is provided, otherwise, use the repo type of the card. + repo_type = repo_type or self.repo_type + + # Validate card before pushing to hub + self.validate(repo_type=repo_type) + + with SoftTemporaryDirectory() as tmpdir: + tmp_path = Path(tmpdir) / constants.REPOCARD_NAME + tmp_path.write_text(str(self), encoding="utf-8") + url = upload_file( + path_or_fileobj=str(tmp_path), + path_in_repo=constants.REPOCARD_NAME, + repo_id=repo_id, + token=token, + repo_type=repo_type, + commit_message=commit_message, + commit_description=commit_description, + create_pr=create_pr, + revision=revision, + parent_commit=parent_commit, + ) + return url + + @classmethod + def from_template( + cls, + card_data: CardData, + template_path: Optional[str] = None, + template_str: Optional[str] = None, + **template_kwargs, + ): + """Initialize a RepoCard from a template. By default, it uses the default template. + + Templates are Jinja2 templates that can be customized by passing keyword arguments. + + Args: + card_data (`huggingface_hub.CardData`): + A huggingface_hub.CardData instance containing the metadata you want to include in the YAML + header of the repo card on the Hugging Face Hub. + template_path (`str`, *optional*): + A path to a markdown file with optional Jinja template variables that can be filled + in with `template_kwargs`. Defaults to the default template. + + Returns: + [`huggingface_hub.repocard.RepoCard`]: A RepoCard instance with the specified card data and content from the + template. + """ + if is_jinja_available(): + import jinja2 + else: + raise ImportError( + "Using RepoCard.from_template requires Jinja2 to be installed. Please" + " install it with `pip install Jinja2`." + ) + + kwargs = card_data.to_dict().copy() + kwargs.update(template_kwargs) # Template_kwargs have priority + + if template_path is not None: + template_str = Path(template_path).read_text() + if template_str is None: + template_str = Path(cls.default_template_path).read_text() + template = jinja2.Template(template_str) + content = template.render(card_data=card_data.to_yaml(), **kwargs) + return cls(content) + + +class ModelCard(RepoCard): + card_data_class = ModelCardData + default_template_path = TEMPLATE_MODELCARD_PATH + repo_type = "model" + + @classmethod + def from_template( # type: ignore # violates Liskov property but easier to use + cls, + card_data: ModelCardData, + template_path: Optional[str] = None, + template_str: Optional[str] = None, + **template_kwargs, + ): + """Initialize a ModelCard from a template. By default, it uses the default template, which can be found here: + https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/modelcard_template.md + + Templates are Jinja2 templates that can be customized by passing keyword arguments. + + Args: + card_data (`huggingface_hub.ModelCardData`): + A huggingface_hub.ModelCardData instance containing the metadata you want to include in the YAML + header of the model card on the Hugging Face Hub. 
+ template_path (`str`, *optional*): + A path to a markdown file with optional Jinja template variables that can be filled + in with `template_kwargs`. Defaults to the default template. + + Returns: + [`huggingface_hub.ModelCard`]: A ModelCard instance with the specified card data and content from the + template. + + Example: + ```python + >>> from huggingface_hub import ModelCard, ModelCardData, EvalResult + + >>> # Using the Default Template + >>> card_data = ModelCardData( + ... language='en', + ... license='mit', + ... library_name='timm', + ... tags=['image-classification', 'resnet'], + ... datasets=['beans'], + ... metrics=['accuracy'], + ... ) + >>> card = ModelCard.from_template( + ... card_data, + ... model_description='This model does x + y...' + ... ) + + >>> # Including Evaluation Results + >>> card_data = ModelCardData( + ... language='en', + ... tags=['image-classification', 'resnet'], + ... eval_results=[ + ... EvalResult( + ... task_type='image-classification', + ... dataset_type='beans', + ... dataset_name='Beans', + ... metric_type='accuracy', + ... metric_value=0.9, + ... ), + ... ], + ... model_name='my-cool-model', + ... ) + >>> card = ModelCard.from_template(card_data) + + >>> # Using a Custom Template + >>> card_data = ModelCardData( + ... language='en', + ... tags=['image-classification', 'resnet'] + ... ) + >>> card = ModelCard.from_template( + ... card_data=card_data, + ... template_path='./src/huggingface_hub/templates/modelcard_template.md', + ... custom_template_var='custom value', # will be replaced in template if it exists + ... ) + + ``` + """ + return super().from_template(card_data, template_path, template_str, **template_kwargs) + + +class DatasetCard(RepoCard): + card_data_class = DatasetCardData + default_template_path = TEMPLATE_DATASETCARD_PATH + repo_type = "dataset" + + @classmethod + def from_template( # type: ignore # violates Liskov property but easier to use + cls, + card_data: DatasetCardData, + template_path: Optional[str] = None, + template_str: Optional[str] = None, + **template_kwargs, + ): + """Initialize a DatasetCard from a template. By default, it uses the default template, which can be found here: + https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md + + Templates are Jinja2 templates that can be customized by passing keyword arguments. + + Args: + card_data (`huggingface_hub.DatasetCardData`): + A huggingface_hub.DatasetCardData instance containing the metadata you want to include in the YAML + header of the dataset card on the Hugging Face Hub. + template_path (`str`, *optional*): + A path to a markdown file with optional Jinja template variables that can be filled + in with `template_kwargs`. Defaults to the default template. + + Returns: + [`huggingface_hub.DatasetCard`]: A DatasetCard instance with the specified card data and content from the + template. + + Example: + ```python + >>> from huggingface_hub import DatasetCard, DatasetCardData + + >>> # Using the Default Template + >>> card_data = DatasetCardData( + ... language='en', + ... license='mit', + ... annotations_creators='crowdsourced', + ... task_categories=['text-classification'], + ... task_ids=['sentiment-classification', 'text-scoring'], + ... multilinguality='monolingual', + ... pretty_name='My Text Classification Dataset', + ... ) + >>> card = DatasetCard.from_template( + ... card_data, + ... pretty_name=card_data.pretty_name, + ... ) + + >>> # Using a Custom Template + >>> card_data = DatasetCardData( + ... 
language='en', + ... license='mit', + ... ) + >>> card = DatasetCard.from_template( + ... card_data=card_data, + ... template_path='./src/huggingface_hub/templates/datasetcard_template.md', + ... custom_template_var='custom value', # will be replaced in template if it exists + ... ) + + ``` + """ + return super().from_template(card_data, template_path, template_str, **template_kwargs) + + +class SpaceCard(RepoCard): + card_data_class = SpaceCardData + default_template_path = TEMPLATE_MODELCARD_PATH + repo_type = "space" + + +def _detect_line_ending(content: str) -> Literal["\r", "\n", "\r\n", None]: # noqa: F722 + """Detect the line ending of a string. Used by RepoCard to avoid making huge diff on newlines. + + Uses same implementation as in Hub server, keep it in sync. + + Returns: + str: The detected line ending of the string. + """ + cr = content.count("\r") + lf = content.count("\n") + crlf = content.count("\r\n") + if cr + lf == 0: + return None + if crlf == cr and crlf == lf: + return "\r\n" + if cr > lf: + return "\r" + else: + return "\n" + + +def metadata_load(local_path: Union[str, Path]) -> Optional[Dict]: + content = Path(local_path).read_text() + match = REGEX_YAML_BLOCK.search(content) + if match: + yaml_block = match.group(2) + data = yaml.safe_load(yaml_block) + if data is None or isinstance(data, dict): + return data + raise ValueError("repo card metadata block should be a dict") + else: + return None + + +def metadata_save(local_path: Union[str, Path], data: Dict) -> None: + """ + Save the metadata dict in the upper YAML part Trying to preserve newlines as + in the existing file. Docs about open() with newline="" parameter: + https://docs.python.org/3/library/functions.html?highlight=open#open Does + not work with "^M" linebreaks, which are replaced by \n + """ + line_break = "\n" + content = "" + # try to detect existing newline character + if os.path.exists(local_path): + with open(local_path, "r", newline="", encoding="utf8") as readme: + content = readme.read() + if isinstance(readme.newlines, tuple): + line_break = readme.newlines[0] + elif isinstance(readme.newlines, str): + line_break = readme.newlines + + # creates a new file if it not + with open(local_path, "w", newline="", encoding="utf8") as readme: + data_yaml = yaml_dump(data, sort_keys=False, line_break=line_break) + # sort_keys: keep dict order + match = REGEX_YAML_BLOCK.search(content) + if match: + output = content[: match.start()] + f"---{line_break}{data_yaml}---{line_break}" + content[match.end() :] + else: + output = f"---{line_break}{data_yaml}---{line_break}{content}" + + readme.write(output) + readme.close() + + +def metadata_eval_result( + *, + model_pretty_name: str, + task_pretty_name: str, + task_id: str, + metrics_pretty_name: str, + metrics_id: str, + metrics_value: Any, + dataset_pretty_name: str, + dataset_id: str, + metrics_config: Optional[str] = None, + metrics_verified: bool = False, + dataset_config: Optional[str] = None, + dataset_split: Optional[str] = None, + dataset_revision: Optional[str] = None, + metrics_verification_token: Optional[str] = None, +) -> Dict: + """ + Creates a metadata dict with the result from a model evaluated on a dataset. + + Args: + model_pretty_name (`str`): + The name of the model in natural language. + task_pretty_name (`str`): + The name of a task in natural language. + task_id (`str`): + Example: automatic-speech-recognition. A task id. + metrics_pretty_name (`str`): + A name for the metric in natural language. Example: Test WER. 
+ metrics_id (`str`): + Example: wer. A metric id from https://hf.co/metrics. + metrics_value (`Any`): + The value from the metric. Example: 20.0 or "20.0 ± 1.2". + dataset_pretty_name (`str`): + The name of the dataset in natural language. + dataset_id (`str`): + Example: common_voice. A dataset id from https://hf.co/datasets. + metrics_config (`str`, *optional*): + The name of the metric configuration used in `load_metric()`. + Example: bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`. + metrics_verified (`bool`, *optional*, defaults to `False`): + Indicates whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. Automatically computed by Hugging Face, do not set. + dataset_config (`str`, *optional*): + Example: fr. The name of the dataset configuration used in `load_dataset()`. + dataset_split (`str`, *optional*): + Example: test. The name of the dataset split used in `load_dataset()`. + dataset_revision (`str`, *optional*): + Example: 5503434ddd753f426f4b38109466949a1217c2bb. The name of the dataset dataset revision + used in `load_dataset()`. + metrics_verification_token (`bool`, *optional*): + A JSON Web Token that is used to verify whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. + + Returns: + `dict`: a metadata dict with the result from a model evaluated on a dataset. + + Example: + ```python + >>> from huggingface_hub import metadata_eval_result + >>> results = metadata_eval_result( + ... model_pretty_name="RoBERTa fine-tuned on ReactionGIF", + ... task_pretty_name="Text Classification", + ... task_id="text-classification", + ... metrics_pretty_name="Accuracy", + ... metrics_id="accuracy", + ... metrics_value=0.2662102282047272, + ... dataset_pretty_name="ReactionJPEG", + ... dataset_id="julien-c/reactionjpeg", + ... dataset_config="default", + ... dataset_split="test", + ... ) + >>> results == { + ... 'model-index': [ + ... { + ... 'name': 'RoBERTa fine-tuned on ReactionGIF', + ... 'results': [ + ... { + ... 'task': { + ... 'type': 'text-classification', + ... 'name': 'Text Classification' + ... }, + ... 'dataset': { + ... 'name': 'ReactionJPEG', + ... 'type': 'julien-c/reactionjpeg', + ... 'config': 'default', + ... 'split': 'test' + ... }, + ... 'metrics': [ + ... { + ... 'type': 'accuracy', + ... 'value': 0.2662102282047272, + ... 'name': 'Accuracy', + ... 'verified': False + ... } + ... ] + ... } + ... ] + ... } + ... ] + ... 
} + True + + ``` + """ + + return { + "model-index": eval_results_to_model_index( + model_name=model_pretty_name, + eval_results=[ + EvalResult( + task_name=task_pretty_name, + task_type=task_id, + metric_name=metrics_pretty_name, + metric_type=metrics_id, + metric_value=metrics_value, + dataset_name=dataset_pretty_name, + dataset_type=dataset_id, + metric_config=metrics_config, + verified=metrics_verified, + verify_token=metrics_verification_token, + dataset_config=dataset_config, + dataset_split=dataset_split, + dataset_revision=dataset_revision, + ) + ], + ) + } + + +@validate_hf_hub_args +def metadata_update( + repo_id: str, + metadata: Dict, + *, + repo_type: Optional[str] = None, + overwrite: bool = False, + token: Optional[str] = None, + commit_message: Optional[str] = None, + commit_description: Optional[str] = None, + revision: Optional[str] = None, + create_pr: bool = False, + parent_commit: Optional[str] = None, +) -> str: + """ + Updates the metadata in the README.md of a repository on the Hugging Face Hub. + If the README.md file doesn't exist yet, a new one is created with metadata and an + the default ModelCard or DatasetCard template. For `space` repo, an error is thrown + as a Space cannot exist without a `README.md` file. + + Args: + repo_id (`str`): + The name of the repository. + metadata (`dict`): + A dictionary containing the metadata to be updated. + repo_type (`str`, *optional*): + Set to `"dataset"` or `"space"` if updating to a dataset or space, + `None` or `"model"` if updating to a model. Default is `None`. + overwrite (`bool`, *optional*, defaults to `False`): + If set to `True` an existing field can be overwritten, otherwise + attempting to overwrite an existing field will cause an error. + token (`str`, *optional*): + The Hugging Face authentication token. + commit_message (`str`, *optional*): + The summary / title / first line of the generated commit. Defaults to + `f"Update metadata with huggingface_hub"` + commit_description (`str` *optional*) + The description of the generated commit + revision (`str`, *optional*): + The git revision to commit from. Defaults to the head of the + `"main"` branch. + create_pr (`boolean`, *optional*): + Whether or not to create a Pull Request from `revision` with that commit. + Defaults to `False`. + parent_commit (`str`, *optional*): + The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. + If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. + If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. + Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be + especially useful if the repo is updated / committed to concurrently. + Returns: + `str`: URL of the commit which updated the card metadata. + + Example: + ```python + >>> from huggingface_hub import metadata_update + >>> metadata = {'model-index': [{'name': 'RoBERTa fine-tuned on ReactionGIF', + ... 'results': [{'dataset': {'name': 'ReactionGIF', + ... 'type': 'julien-c/reactiongif'}, + ... 'metrics': [{'name': 'Recall', + ... 'type': 'recall', + ... 'value': 0.7762102282047272}], + ... 'task': {'name': 'Text Classification', + ... 
'type': 'text-classification'}}]}]} + >>> url = metadata_update("hf-internal-testing/reactiongif-roberta-card", metadata) + + ``` + """ + commit_message = commit_message if commit_message is not None else "Update metadata with huggingface_hub" + + # Card class given repo_type + card_class: Type[RepoCard] + if repo_type is None or repo_type == "model": + card_class = ModelCard + elif repo_type == "dataset": + card_class = DatasetCard + elif repo_type == "space": + card_class = RepoCard + else: + raise ValueError(f"Unknown repo_type: {repo_type}") + + # Either load repo_card from the Hub or create an empty one. + # NOTE: Will not create the repo if it doesn't exist. + try: + card = card_class.load(repo_id, token=token, repo_type=repo_type) + except EntryNotFoundError: + if repo_type == "space": + raise ValueError("Cannot update metadata on a Space that doesn't contain a `README.md` file.") + + # Initialize a ModelCard or DatasetCard from default template and no data. + # Cast to the concrete expected card type to satisfy type checkers. + card = card_class.from_template(CardData()) # type: ignore[return-value] + + for key, value in metadata.items(): + if key == "model-index": + # if the new metadata doesn't include a name, either use existing one or repo name + if "name" not in value[0]: + value[0]["name"] = getattr(card, "model_name", repo_id) + model_name, new_results = model_index_to_eval_results(value) + if card.data.eval_results is None: + card.data.eval_results = new_results + card.data.model_name = model_name + else: + existing_results = card.data.eval_results + + # Iterate over new results + # Iterate over existing results + # If both results describe the same metric but value is different: + # If overwrite=True: overwrite the metric value + # Else: raise ValueError + # Else: append new result to existing ones. + for new_result in new_results: + result_found = False + for existing_result in existing_results: + if new_result.is_equal_except_value(existing_result): + if new_result != existing_result and not overwrite: + raise ValueError( + "You passed a new value for the existing metric" + f" 'name: {new_result.metric_name}, type: " + f"{new_result.metric_type}'. Set `overwrite=True`" + " to overwrite existing metrics." + ) + result_found = True + existing_result.metric_value = new_result.metric_value + if existing_result.verified is True: + existing_result.verify_token = new_result.verify_token + if not result_found: + card.data.eval_results.append(new_result) + else: + # Any metadata that is not a result metric + if card.data.get(key) is not None and not overwrite and card.data.get(key) != value: + raise ValueError( + f"You passed a new value for the existing meta data field '{key}'." + " Set `overwrite=True` to overwrite existing metadata." 
+ ) + else: + card.data[key] = value + + return card.push_to_hub( + repo_id, + token=token, + repo_type=repo_type, + commit_message=commit_message, + commit_description=commit_description, + create_pr=create_pr, + revision=revision, + parent_commit=parent_commit, + ) diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/repocard_data.py b/venv/lib/python3.13/site-packages/huggingface_hub/repocard_data.py new file mode 100644 index 0000000000000000000000000000000000000000..62215f2274e482d4ed69a1d6deeafdf34fc5a6a4 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/repocard_data.py @@ -0,0 +1,770 @@ +import copy +from collections import defaultdict +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +from huggingface_hub.utils import logging, yaml_dump + + +logger = logging.get_logger(__name__) + + +@dataclass +class EvalResult: + """ + Flattened representation of individual evaluation results found in model-index of Model Cards. + + For more information on the model-index spec, see https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1. + + Args: + task_type (`str`): + The task identifier. Example: "image-classification". + dataset_type (`str`): + The dataset identifier. Example: "common_voice". Use dataset id from https://hf.co/datasets. + dataset_name (`str`): + A pretty name for the dataset. Example: "Common Voice (French)". + metric_type (`str`): + The metric identifier. Example: "wer". Use metric id from https://hf.co/metrics. + metric_value (`Any`): + The metric value. Example: 0.9 or "20.0 ± 1.2". + task_name (`str`, *optional*): + A pretty name for the task. Example: "Speech Recognition". + dataset_config (`str`, *optional*): + The name of the dataset configuration used in `load_dataset()`. + Example: fr in `load_dataset("common_voice", "fr")`. See the `datasets` docs for more info: + https://hf.co/docs/datasets/package_reference/loading_methods#datasets.load_dataset.name + dataset_split (`str`, *optional*): + The split used in `load_dataset()`. Example: "test". + dataset_revision (`str`, *optional*): + The revision (AKA Git Sha) of the dataset used in `load_dataset()`. + Example: 5503434ddd753f426f4b38109466949a1217c2bb + dataset_args (`Dict[str, Any]`, *optional*): + The arguments passed during `Metric.compute()`. Example for `bleu`: `{"max_order": 4}` + metric_name (`str`, *optional*): + A pretty name for the metric. Example: "Test WER". + metric_config (`str`, *optional*): + The name of the metric configuration used in `load_metric()`. + Example: bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`. + See the `datasets` docs for more info: https://huggingface.co/docs/datasets/v2.1.0/en/loading#load-configurations + metric_args (`Dict[str, Any]`, *optional*): + The arguments passed during `Metric.compute()`. Example for `bleu`: max_order: 4 + verified (`bool`, *optional*): + Indicates whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. Automatically computed by Hugging Face, do not set. + verify_token (`str`, *optional*): + A JSON Web Token that is used to verify whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. + source_name (`str`, *optional*): + The name of the source of the evaluation result. Example: "Open LLM Leaderboard". 
+ source_url (`str`, *optional*): + The URL of the source of the evaluation result. Example: "https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard". + """ + + # Required + + # The task identifier + # Example: automatic-speech-recognition + task_type: str + + # The dataset identifier + # Example: common_voice. Use dataset id from https://hf.co/datasets + dataset_type: str + + # A pretty name for the dataset. + # Example: Common Voice (French) + dataset_name: str + + # The metric identifier + # Example: wer. Use metric id from https://hf.co/metrics + metric_type: str + + # Value of the metric. + # Example: 20.0 or "20.0 ± 1.2" + metric_value: Any + + # Optional + + # A pretty name for the task. + # Example: Speech Recognition + task_name: Optional[str] = None + + # The name of the dataset configuration used in `load_dataset()`. + # Example: fr in `load_dataset("common_voice", "fr")`. + # See the `datasets` docs for more info: + # https://huggingface.co/docs/datasets/package_reference/loading_methods#datasets.load_dataset.name + dataset_config: Optional[str] = None + + # The split used in `load_dataset()`. + # Example: test + dataset_split: Optional[str] = None + + # The revision (AKA Git Sha) of the dataset used in `load_dataset()`. + # Example: 5503434ddd753f426f4b38109466949a1217c2bb + dataset_revision: Optional[str] = None + + # The arguments passed during `Metric.compute()`. + # Example for `bleu`: max_order: 4 + dataset_args: Optional[Dict[str, Any]] = None + + # A pretty name for the metric. + # Example: Test WER + metric_name: Optional[str] = None + + # The name of the metric configuration used in `load_metric()`. + # Example: bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`. + # See the `datasets` docs for more info: https://huggingface.co/docs/datasets/v2.1.0/en/loading#load-configurations + metric_config: Optional[str] = None + + # The arguments passed during `Metric.compute()`. + # Example for `bleu`: max_order: 4 + metric_args: Optional[Dict[str, Any]] = None + + # Indicates whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. Automatically computed by Hugging Face, do not set. + verified: Optional[bool] = None + + # A JSON Web Token that is used to verify whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. + verify_token: Optional[str] = None + + # The name of the source of the evaluation result. + # Example: Open LLM Leaderboard + source_name: Optional[str] = None + + # The URL of the source of the evaluation result. + # Example: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard + source_url: Optional[str] = None + + @property + def unique_identifier(self) -> tuple: + """Returns a tuple that uniquely identifies this evaluation.""" + return ( + self.task_type, + self.dataset_type, + self.dataset_config, + self.dataset_split, + self.dataset_revision, + ) + + def is_equal_except_value(self, other: "EvalResult") -> bool: + """ + Return True if `self` and `other` describe exactly the same metric but with a + different value. + """ + for key, _ in self.__dict__.items(): + if key == "metric_value": + continue + # For metrics computed by Hugging Face's evaluation service, `verify_token` is derived from `metric_value`, + # so we exclude it here in the comparison. 
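+ # Put differently: the two results must agree on every field except
+ # `metric_value` (and the value-derived `verify_token`) -- task, dataset,
+ # metric identifiers/config/args and source fields alike.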
+            if key != "verify_token" and getattr(self, key) != getattr(other, key):
+                return False
+        return True
+
+    def __post_init__(self) -> None:
+        if self.source_name is not None and self.source_url is None:
+            raise ValueError("If `source_name` is provided, `source_url` must also be provided.")
+
+
+@dataclass
+class CardData:
+    """Structure containing metadata from a RepoCard.
+
+    [`CardData`] is the parent class of [`ModelCardData`] and [`DatasetCardData`].
+
+    Metadata can be exported as a dictionary or YAML. Export can be customized to alter the representation of the data
+    (example: flatten evaluation results). `CardData` behaves as a dictionary (can get, pop, set values) but does not
+    inherit from `dict` to allow this export step.
+    """
+
+    def __init__(self, ignore_metadata_errors: bool = False, **kwargs):
+        self.__dict__.update(kwargs)
+
+    def to_dict(self):
+        """Converts CardData to a dict.
+
+        Returns:
+            `dict`: CardData represented as a dictionary ready to be dumped to a YAML
+            block for inclusion in a README.md file.
+        """
+
+        data_dict = copy.deepcopy(self.__dict__)
+        self._to_dict(data_dict)
+        return {key: value for key, value in data_dict.items() if value is not None}
+
+    def _to_dict(self, data_dict):
+        """Use this method in child classes to alter the dict representation of the data. Alter the dict in-place.
+
+        Args:
+            data_dict (`dict`): The raw dict representation of the card data.
+        """
+        pass
+
+    def to_yaml(self, line_break=None, original_order: Optional[List[str]] = None) -> str:
+        """Dumps CardData to a YAML block for inclusion in a README.md file.
+
+        Args:
+            line_break (`str`, *optional*):
+                The line break to use when dumping to yaml.
+            original_order (`List[str]`, *optional*):
+                The original order of the metadata keys. If provided, keys are re-ordered
+                to follow it before dumping, so an existing card keeps a stable key order.
+
+        Returns:
+            `str`: CardData represented as a YAML block.
+        """
+        if original_order:
+            self.__dict__ = {
+                k: self.__dict__[k]
+                for k in original_order + list(set(self.__dict__.keys()) - set(original_order))
+                if k in self.__dict__
+            }
+        return yaml_dump(self.to_dict(), sort_keys=False, line_break=line_break).strip()
+
+    def __repr__(self):
+        return repr(self.__dict__)
+
+    def __str__(self):
+        return self.to_yaml()
+
+    def get(self, key: str, default: Any = None) -> Any:
+        """Get value for a given metadata key."""
+        value = self.__dict__.get(key)
+        return default if value is None else value
+
+    def pop(self, key: str, default: Any = None) -> Any:
+        """Pop value for a given metadata key."""
+        return self.__dict__.pop(key, default)
+
+    def __getitem__(self, key: str) -> Any:
+        """Get value for a given metadata key."""
+        return self.__dict__[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        """Set value for a given metadata key."""
+        self.__dict__[key] = value
+
+    def __contains__(self, key: str) -> bool:
+        """Check if a given metadata key is set."""
+        return key in self.__dict__
+
+    def __len__(self) -> int:
+        """Return the number of metadata keys set."""
+        return len(self.__dict__)
+
+
+def _validate_eval_results(
+    eval_results: Optional[Union[EvalResult, List[EvalResult]]],
+    model_name: Optional[str],
+) -> List[EvalResult]:
+    if eval_results is None:
+        return []
+    if isinstance(eval_results, EvalResult):
+        eval_results = [eval_results]
+    if not isinstance(eval_results, list) or not all(isinstance(r, EvalResult) for r in eval_results):
+        raise ValueError(
+            f"`eval_results` should be of type `EvalResult` or a list of `EvalResult`, got {type(eval_results)}."
+        )
+    if model_name is None:
+        raise ValueError("Passing `eval_results` requires `model_name` to be set.")
+    return eval_results
+
+
+class ModelCardData(CardData):
+    """Model Card Metadata that is used by Hugging Face Hub when included at the top of your README.md
+
+    Args:
+        base_model (`str` or `List[str]`, *optional*):
+            The identifier of the base model from which the model derives. This is applicable for example if your model is a
+            fine-tune or adapter of an existing model. The value must be the ID of a model on the Hub (or a list of IDs
+            if your model derives from multiple models). Defaults to None.
+        datasets (`Union[str, List[str]]`, *optional*):
+            Dataset or list of datasets that were used to train this model. Should be a dataset ID
+            found on https://hf.co/datasets. Defaults to None.
+        eval_results (`Union[List[EvalResult], EvalResult]`, *optional*):
+            List of `huggingface_hub.EvalResult` that define evaluation results of the model. If provided,
+            `model_name` is used as a name on PapersWithCode's leaderboards. Defaults to `None`.
+        language (`Union[str, List[str]]`, *optional*):
+            Language of model's training data or metadata. It must be an ISO 639-1, 639-2 or
+            639-3 code (two/three letters), or a special value like "code", "multilingual". Defaults to `None`.
+        library_name (`str`, *optional*):
+            Name of library used by this model. Example: keras or any library from
+            https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/src/model-libraries.ts.
+            Defaults to None.
+        license (`str`, *optional*):
+            License of this model. Example: apache-2.0 or any license from
+            https://huggingface.co/docs/hub/repositories-licenses. Defaults to None.
+        license_name (`str`, *optional*):
+            Name of the license of this model. Defaults to None. To be used in conjunction with `license_link`.
+            Common licenses (Apache-2.0, MIT, CC-BY-SA-4.0) do not need a name. In that case, use `license` instead.
+        license_link (`str`, *optional*):
+            Link to the license of this model. Defaults to None. To be used in conjunction with `license_name`.
+            Common licenses (Apache-2.0, MIT, CC-BY-SA-4.0) do not need a link. In that case, use `license` instead.
+        metrics (`List[str]`, *optional*):
+            List of metrics used to evaluate this model. Should be a metric name that can be found
+            at https://hf.co/metrics. Example: 'accuracy'. Defaults to None.
+        model_name (`str`, *optional*):
+            A name for this model. It is used along with
+            `eval_results` to construct the `model-index` within the card's metadata. The name
+            you supply here is what will be used on PapersWithCode's leaderboards. If None is provided
+            then the repo name is used as a default. Defaults to None.
+        pipeline_tag (`str`, *optional*):
+            The pipeline tag associated with the model. Example: "text-classification".
+        tags (`List[str]`, *optional*):
+            List of tags to add to your model that can be used when filtering on the Hugging
+            Face Hub. Defaults to None.
+        ignore_metadata_errors (`bool`, *optional*, defaults to `False`):
+            If True, errors while parsing the metadata section will be ignored. Some information might be lost during
+            the process. Use it at your own risk.
+        kwargs (`dict`, *optional*):
+            Additional metadata that will be added to the model card. Defaults to None.
+
+    Example:
+        ```python
+        >>> from huggingface_hub import ModelCardData
+        >>> card_data = ModelCardData(
+        ...     language="en",
+        ...     license="mit",
+        ...     library_name="timm",
+        ...     tags=['image-classification', 'resnet'],
+        ...
) + >>> card_data.to_dict() + {'language': 'en', 'license': 'mit', 'library_name': 'timm', 'tags': ['image-classification', 'resnet']} + + ``` + """ + + def __init__( + self, + *, + base_model: Optional[Union[str, List[str]]] = None, + datasets: Optional[Union[str, List[str]]] = None, + eval_results: Optional[List[EvalResult]] = None, + language: Optional[Union[str, List[str]]] = None, + library_name: Optional[str] = None, + license: Optional[str] = None, + license_name: Optional[str] = None, + license_link: Optional[str] = None, + metrics: Optional[List[str]] = None, + model_name: Optional[str] = None, + pipeline_tag: Optional[str] = None, + tags: Optional[List[str]] = None, + ignore_metadata_errors: bool = False, + **kwargs, + ): + self.base_model = base_model + self.datasets = datasets + self.eval_results = eval_results + self.language = language + self.library_name = library_name + self.license = license + self.license_name = license_name + self.license_link = license_link + self.metrics = metrics + self.model_name = model_name + self.pipeline_tag = pipeline_tag + self.tags = _to_unique_list(tags) + + model_index = kwargs.pop("model-index", None) + if model_index: + try: + model_name, eval_results = model_index_to_eval_results(model_index) + self.model_name = model_name + self.eval_results = eval_results + except (KeyError, TypeError) as error: + if ignore_metadata_errors: + logger.warning("Invalid model-index. Not loading eval results into CardData.") + else: + raise ValueError( + f"Invalid `model_index` in metadata cannot be parsed: {error.__class__} {error}. Pass" + " `ignore_metadata_errors=True` to ignore this error while loading a Model Card. Warning:" + " some information will be lost. Use it at your own risk." + ) + + super().__init__(**kwargs) + + if self.eval_results: + try: + self.eval_results = _validate_eval_results(self.eval_results, self.model_name) + except Exception as e: + if ignore_metadata_errors: + logger.warning(f"Failed to validate eval_results: {e}. Not loading eval results into CardData.") + else: + raise ValueError(f"Failed to validate eval_results: {e}") from e + + def _to_dict(self, data_dict): + """Format the internal data dict. In this case, we convert eval results to a valid model index""" + if self.eval_results is not None: + data_dict["model-index"] = eval_results_to_model_index(self.model_name, self.eval_results) + del data_dict["eval_results"], data_dict["model_name"] + + +class DatasetCardData(CardData): + """Dataset Card Metadata that is used by Hugging Face Hub when included at the top of your README.md + + Args: + language (`List[str]`, *optional*): + Language of dataset's data or metadata. It must be an ISO 639-1, 639-2 or + 639-3 code (two/three letters), or a special value like "code", "multilingual". + license (`Union[str, List[str]]`, *optional*): + License(s) of this dataset. Example: apache-2.0 or any license from + https://huggingface.co/docs/hub/repositories-licenses. + annotations_creators (`Union[str, List[str]]`, *optional*): + How the annotations for the dataset were created. + Options are: 'found', 'crowdsourced', 'expert-generated', 'machine-generated', 'no-annotation', 'other'. + language_creators (`Union[str, List[str]]`, *optional*): + How the text-based data in the dataset was created. + Options are: 'found', 'crowdsourced', 'expert-generated', 'machine-generated', 'other' + multilinguality (`Union[str, List[str]]`, *optional*): + Whether the dataset is multilingual. 
+            Options are: 'monolingual', 'multilingual', 'translation', 'other'.
+        size_categories (`Union[str, List[str]]`, *optional*):
+            The number of examples in the dataset. Options are: 'n<1K', '1K<n<10K', '10K<n<100K',
+            '100K<n<1M', '1M<n<10M', '10M<n<100M', '100M<n<1B', '1B<n<10B', '10B<n<100B', '100B<n<1T', 'n>1T', and 'other'.
+        source_datasets (`List[str]`, *optional*):
+            Indicates whether the dataset is an original dataset or extended from another existing dataset.
+            Options are: 'original' and 'extended'.
+        task_categories (`Union[str, List[str]]`, *optional*):
+            What categories of task does the dataset support?
+        task_ids (`Union[str, List[str]]`, *optional*):
+            What specific tasks does the dataset support?
+        paperswithcode_id (`str`, *optional*):
+            ID of the dataset on PapersWithCode.
+        pretty_name (`str`, *optional*):
+            A more human-readable name for the dataset. (ex. "Cats vs. Dogs")
+        train_eval_index (`Dict`, *optional*):
+            A dictionary that describes the necessary spec for doing evaluation on the Hub.
+            If not provided, it will be gathered from the 'train-eval-index' key of the kwargs.
+        config_names (`Union[str, List[str]]`, *optional*):
+            A list of the available dataset configs for the dataset.
+    """
+
+    def __init__(
+        self,
+        *,
+        language: Optional[Union[str, List[str]]] = None,
+        license: Optional[Union[str, List[str]]] = None,
+        annotations_creators: Optional[Union[str, List[str]]] = None,
+        language_creators: Optional[Union[str, List[str]]] = None,
+        multilinguality: Optional[Union[str, List[str]]] = None,
+        size_categories: Optional[Union[str, List[str]]] = None,
+        source_datasets: Optional[List[str]] = None,
+        task_categories: Optional[Union[str, List[str]]] = None,
+        task_ids: Optional[Union[str, List[str]]] = None,
+        paperswithcode_id: Optional[str] = None,
+        pretty_name: Optional[str] = None,
+        train_eval_index: Optional[Dict] = None,
+        config_names: Optional[Union[str, List[str]]] = None,
+        ignore_metadata_errors: bool = False,
+        **kwargs,
+    ):
+        self.annotations_creators = annotations_creators
+        self.language_creators = language_creators
+        self.language = language
+        self.license = license
+        self.multilinguality = multilinguality
+        self.size_categories = size_categories
+        self.source_datasets = source_datasets
+        self.task_categories = task_categories
+        self.task_ids = task_ids
+        self.paperswithcode_id = paperswithcode_id
+        self.pretty_name = pretty_name
+        self.config_names = config_names
+
+        # TODO - maybe handle this similarly to EvalResult?
+        self.train_eval_index = train_eval_index or kwargs.pop("train-eval-index", None)
+        super().__init__(**kwargs)
+
+    def _to_dict(self, data_dict):
+        data_dict["train-eval-index"] = data_dict.pop("train_eval_index")
+
+
+class SpaceCardData(CardData):
+    """Space Card Metadata that is used by Hugging Face Hub when included at the top of your README.md
+
+    To get an exhaustive reference of Spaces configuration, please visit https://huggingface.co/docs/hub/spaces-config-reference#spaces-configuration-reference.
+
+    Args:
+        title (`str`, *optional*):
+            Title of the Space.
+        sdk (`str`, *optional*):
+            SDK of the Space (one of `gradio`, `streamlit`, `docker`, or `static`).
+        sdk_version (`str`, *optional*):
+            Version of the used SDK (if Gradio/Streamlit sdk).
+        python_version (`str`, *optional*):
+            Python version used in the Space (if Gradio/Streamlit sdk).
+        app_file (`str`, *optional*):
+            Path to your main application file (which contains either gradio or streamlit Python code, or static html code).
+            Path is relative to the root of the repository.
+        app_port (`int`, *optional*):
+            Port on which your application is running. Used only if sdk is `docker`.
+        license (`str`, *optional*):
+            License of this Space. Example: apache-2.0 or any license from
+            https://huggingface.co/docs/hub/repositories-licenses.
+        duplicated_from (`str`, *optional*):
+            ID of the original Space if this is a duplicated Space.
+        models (`List[str]`, *optional*):
+            List of models related to this Space. Should be a model ID found on https://hf.co/models.
+        datasets (`List[str]`, *optional*):
+            List of datasets related to this Space. Should be a dataset ID found on https://hf.co/datasets.
+        tags (`List[str]`, *optional*):
+            List of tags to add to your Space that can be used when filtering on the Hub.
+        ignore_metadata_errors (`bool`):
+            If True, errors while parsing the metadata section will be ignored. Some information might be lost during
+            the process. Use it at your own risk.
+        kwargs (`dict`, *optional*):
+            Additional metadata that will be added to the space card.
+
+    Example:
+        ```python
+        >>> from huggingface_hub import SpaceCardData
+        >>> card_data = SpaceCardData(
+        ...     title="Dreambooth Training",
+        ...     license="mit",
+        ...     sdk="gradio",
+        ...     duplicated_from="multimodalart/dreambooth-training"
+        ... )
+        >>> card_data.to_dict()
+        {'title': 'Dreambooth Training', 'sdk': 'gradio', 'license': 'mit', 'duplicated_from': 'multimodalart/dreambooth-training'}
+        ```
+    """
+
+    def __init__(
+        self,
+        *,
+        title: Optional[str] = None,
+        sdk: Optional[str] = None,
+        sdk_version: Optional[str] = None,
+        python_version: Optional[str] = None,
+        app_file: Optional[str] = None,
+        app_port: Optional[int] = None,
+        license: Optional[str] = None,
+        duplicated_from: Optional[str] = None,
+        models: Optional[List[str]] = None,
+        datasets: Optional[List[str]] = None,
+        tags: Optional[List[str]] = None,
+        ignore_metadata_errors: bool = False,
+        **kwargs,
+    ):
+        self.title = title
+        self.sdk = sdk
+        self.sdk_version = sdk_version
+        self.python_version = python_version
+        self.app_file = app_file
+        self.app_port = app_port
+        self.license = license
+        self.duplicated_from = duplicated_from
+        self.models = models
+        self.datasets = datasets
+        self.tags = _to_unique_list(tags)
+        super().__init__(**kwargs)
+
+
+def model_index_to_eval_results(model_index: List[Dict[str, Any]]) -> Tuple[str, List[EvalResult]]:
+    """Takes in a model index and returns the model name and a list of `huggingface_hub.EvalResult` objects.
+
+    A detailed spec of the model index can be found here:
+    https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1
+
+    Args:
+        model_index (`List[Dict[str, Any]]`):
+            A model index data structure, likely coming from a README.md file on the
+            Hugging Face Hub.
+
+    Returns:
+        model_name (`str`):
+            The name of the model as found in the model index. This is used as the
+            identifier for the model on leaderboards like PapersWithCode.
+        eval_results (`List[EvalResult]`):
+            A list of `huggingface_hub.EvalResult` objects containing the metrics
+            reported in the provided model_index.
+
+    Example:
+        ```python
+        >>> from huggingface_hub.repocard_data import model_index_to_eval_results
+        >>> # Define a minimal model index
+        >>> model_index = [
+        ...     {
+        ...         "name": "my-cool-model",
+        ...         "results": [
+        ...             {
+        ...                 "task": {
+        ...                     "type": "image-classification"
+        ...                 },
+        ...                 "dataset": {
+        ...                     "type": "beans",
+        ...                     "name": "Beans"
+        ...                 },
+        ...                 "metrics": [
+        ...                     {
+        ...                         "type": "accuracy",
+        ...                         "value": 0.9
+        ...                     }
+        ...                 ]
+        ...             }
+        ...         ]
+        ...     }
+        ...
] + >>> model_name, eval_results = model_index_to_eval_results(model_index) + >>> model_name + 'my-cool-model' + >>> eval_results[0].task_type + 'image-classification' + >>> eval_results[0].metric_type + 'accuracy' + + ``` + """ + + eval_results = [] + for elem in model_index: + name = elem["name"] + results = elem["results"] + for result in results: + task_type = result["task"]["type"] + task_name = result["task"].get("name") + dataset_type = result["dataset"]["type"] + dataset_name = result["dataset"]["name"] + dataset_config = result["dataset"].get("config") + dataset_split = result["dataset"].get("split") + dataset_revision = result["dataset"].get("revision") + dataset_args = result["dataset"].get("args") + source_name = result.get("source", {}).get("name") + source_url = result.get("source", {}).get("url") + + for metric in result["metrics"]: + metric_type = metric["type"] + metric_value = metric["value"] + metric_name = metric.get("name") + metric_args = metric.get("args") + metric_config = metric.get("config") + verified = metric.get("verified") + verify_token = metric.get("verifyToken") + + eval_result = EvalResult( + task_type=task_type, # Required + dataset_type=dataset_type, # Required + dataset_name=dataset_name, # Required + metric_type=metric_type, # Required + metric_value=metric_value, # Required + task_name=task_name, + dataset_config=dataset_config, + dataset_split=dataset_split, + dataset_revision=dataset_revision, + dataset_args=dataset_args, + metric_name=metric_name, + metric_args=metric_args, + metric_config=metric_config, + verified=verified, + verify_token=verify_token, + source_name=source_name, + source_url=source_url, + ) + eval_results.append(eval_result) + return name, eval_results + + +def _remove_none(obj): + """ + Recursively remove `None` values from a dict. Borrowed from: https://stackoverflow.com/a/20558778 + """ + if isinstance(obj, (list, tuple, set)): + return type(obj)(_remove_none(x) for x in obj if x is not None) + elif isinstance(obj, dict): + return type(obj)((_remove_none(k), _remove_none(v)) for k, v in obj.items() if k is not None and v is not None) + else: + return obj + + +def eval_results_to_model_index(model_name: str, eval_results: List[EvalResult]) -> List[Dict[str, Any]]: + """Takes in given model name and list of `huggingface_hub.EvalResult` and returns a + valid model-index that will be compatible with the format expected by the + Hugging Face Hub. + + Args: + model_name (`str`): + Name of the model (ex. "my-cool-model"). This is used as the identifier + for the model on leaderboards like PapersWithCode. + eval_results (`List[EvalResult]`): + List of `huggingface_hub.EvalResult` objects containing the metrics to be + reported in the model-index. + + Returns: + model_index (`List[Dict[str, Any]]`): The eval_results converted to a model-index. + + Example: + ```python + >>> from huggingface_hub.repocard_data import eval_results_to_model_index, EvalResult + >>> # Define minimal eval_results + >>> eval_results = [ + ... EvalResult( + ... task_type="image-classification", # Required + ... dataset_type="beans", # Required + ... dataset_name="Beans", # Required + ... metric_type="accuracy", # Required + ... metric_value=0.9, # Required + ... ) + ... 
] + >>> eval_results_to_model_index("my-cool-model", eval_results) + [{'name': 'my-cool-model', 'results': [{'task': {'type': 'image-classification'}, 'dataset': {'name': 'Beans', 'type': 'beans'}, 'metrics': [{'type': 'accuracy', 'value': 0.9}]}]}] + + ``` + """ + + # Metrics are reported on a unique task-and-dataset basis. + # Here, we make a map of those pairs and the associated EvalResults. + task_and_ds_types_map: Dict[Any, List[EvalResult]] = defaultdict(list) + for eval_result in eval_results: + task_and_ds_types_map[eval_result.unique_identifier].append(eval_result) + + # Use the map from above to generate the model index data. + model_index_data = [] + for results in task_and_ds_types_map.values(): + # All items from `results` share same metadata + sample_result = results[0] + data = { + "task": { + "type": sample_result.task_type, + "name": sample_result.task_name, + }, + "dataset": { + "name": sample_result.dataset_name, + "type": sample_result.dataset_type, + "config": sample_result.dataset_config, + "split": sample_result.dataset_split, + "revision": sample_result.dataset_revision, + "args": sample_result.dataset_args, + }, + "metrics": [ + { + "type": result.metric_type, + "value": result.metric_value, + "name": result.metric_name, + "config": result.metric_config, + "args": result.metric_args, + "verified": result.verified, + "verifyToken": result.verify_token, + } + for result in results + ], + } + if sample_result.source_url is not None: + source = { + "url": sample_result.source_url, + } + if sample_result.source_name is not None: + source["name"] = sample_result.source_name + data["source"] = source + model_index_data.append(data) + + # TODO - Check if there cases where this list is longer than one? + # Finally, the model index itself is list of dicts. + model_index = [ + { + "name": model_name, + "results": model_index_data, + } + ] + return _remove_none(model_index) + + +def _to_unique_list(tags: Optional[List[str]]) -> Optional[List[str]]: + if tags is None: + return tags + unique_tags = [] # make tags unique + keep order explicitly + for tag in tags: + if tag not in unique_tags: + unique_tags.append(tag) + return unique_tags diff --git a/venv/lib/python3.13/site-packages/huggingface_hub/repository.py b/venv/lib/python3.13/site-packages/huggingface_hub/repository.py new file mode 100644 index 0000000000000000000000000000000000000000..56e2bce619dff404476f95343fb039a0dae9fc56 --- /dev/null +++ b/venv/lib/python3.13/site-packages/huggingface_hub/repository.py @@ -0,0 +1,1471 @@ +import atexit +import os +import re +import subprocess +import threading +import time +from contextlib import contextmanager +from pathlib import Path +from typing import Callable, Dict, Iterator, List, Optional, Tuple, TypedDict, Union +from urllib.parse import urlparse + +from huggingface_hub import constants +from huggingface_hub.repocard import metadata_load, metadata_save + +from .hf_api import HfApi, repo_type_and_id_from_hf_id +from .lfs import LFS_MULTIPART_UPLOAD_COMMAND +from .utils import ( + SoftTemporaryDirectory, + get_token, + logging, + run_subprocess, + tqdm, + validate_hf_hub_args, +) +from .utils._deprecation import _deprecate_method + + +logger = logging.get_logger(__name__) + + +class CommandInProgress: + """ + Utility to follow commands launched asynchronously. 
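+
+    Example (an illustrative sketch; it assumes `repo` is an existing
+    [`Repository`] instance, since [`Repository.git_push`] called with
+    `blocking=False` returns such an object):
+
+    ```python
+    >>> url, push_command = repo.git_push(blocking=False)
+    >>> push_command.is_done  # `False` while the push is still running
+    >>> push_command.status  # `-1` while ongoing, the exit code once finished
+    ```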
+ """ + + def __init__( + self, + title: str, + is_done_method: Callable, + status_method: Callable, + process: subprocess.Popen, + post_method: Optional[Callable] = None, + ): + self.title = title + self._is_done = is_done_method + self._status = status_method + self._process = process + self._stderr = "" + self._stdout = "" + self._post_method = post_method + + @property + def is_done(self) -> bool: + """ + Whether the process is done. + """ + result = self._is_done() + + if result and self._post_method is not None: + self._post_method() + self._post_method = None + + return result + + @property + def status(self) -> int: + """ + The exit code/status of the current action. Will return `0` if the + command has completed successfully, and a number between 1 and 255 if + the process errored-out. + + Will return -1 if the command is still ongoing. + """ + return self._status() + + @property + def failed(self) -> bool: + """ + Whether the process errored-out. + """ + return self.status > 0 + + @property + def stderr(self) -> str: + """ + The current output message on the standard error. + """ + if self._process.stderr is not None: + self._stderr += self._process.stderr.read() + return self._stderr + + @property + def stdout(self) -> str: + """ + The current output message on the standard output. + """ + if self._process.stdout is not None: + self._stdout += self._process.stdout.read() + return self._stdout + + def __repr__(self): + status = self.status + + if status == -1: + status = "running" + + return ( + f"[{self.title} command, status code: {status}," + f" {'in progress.' if not self.is_done else 'finished.'} PID:" + f" {self._process.pid}]" + ) + + +def is_git_repo(folder: Union[str, Path]) -> bool: + """ + Check if the folder is the root or part of a git repository + + Args: + folder (`str`): + The folder in which to run the command. + + Returns: + `bool`: `True` if the repository is part of a repository, `False` + otherwise. + """ + folder_exists = os.path.exists(os.path.join(folder, ".git")) + git_branch = subprocess.run("git branch".split(), cwd=folder, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + return folder_exists and git_branch.returncode == 0 + + +def is_local_clone(folder: Union[str, Path], remote_url: str) -> bool: + """ + Check if the folder is a local clone of the remote_url + + Args: + folder (`str` or `Path`): + The folder in which to run the command. + remote_url (`str`): + The url of a git repository. + + Returns: + `bool`: `True` if the repository is a local clone of the remote + repository specified, `False` otherwise. + """ + if not is_git_repo(folder): + return False + + remotes = run_subprocess("git remote -v", folder).stdout + + # Remove token for the test with remotes. + remote_url = re.sub(r"https://.*@", "https://", remote_url) + remotes = [re.sub(r"https://.*@", "https://", remote) for remote in remotes.split()] + return remote_url in remotes + + +def is_tracked_with_lfs(filename: Union[str, Path]) -> bool: + """ + Check if the file passed is tracked with git-lfs. + + Args: + filename (`str` or `Path`): + The filename to check. + + Returns: + `bool`: `True` if the file passed is tracked with git-lfs, `False` + otherwise. 
+ """ + folder = Path(filename).parent + filename = Path(filename).name + + try: + p = run_subprocess("git check-attr -a".split() + [filename], folder) + attributes = p.stdout.strip() + except subprocess.CalledProcessError as exc: + if not is_git_repo(folder): + return False + else: + raise OSError(exc.stderr) + + if len(attributes) == 0: + return False + + found_lfs_tag = {"diff": False, "merge": False, "filter": False} + + for attribute in attributes.split("\n"): + for tag in found_lfs_tag.keys(): + if tag in attribute and "lfs" in attribute: + found_lfs_tag[tag] = True + + return all(found_lfs_tag.values()) + + +def is_git_ignored(filename: Union[str, Path]) -> bool: + """ + Check if file is git-ignored. Supports nested .gitignore files. + + Args: + filename (`str` or `Path`): + The filename to check. + + Returns: + `bool`: `True` if the file passed is ignored by `git`, `False` + otherwise. + """ + folder = Path(filename).parent + filename = Path(filename).name + + try: + p = run_subprocess("git check-ignore".split() + [filename], folder, check=False) + # Will return exit code 1 if not gitignored + is_ignored = not bool(p.returncode) + except subprocess.CalledProcessError as exc: + raise OSError(exc.stderr) + + return is_ignored + + +def is_binary_file(filename: Union[str, Path]) -> bool: + """ + Check if file is a binary file. + + Args: + filename (`str` or `Path`): + The filename to check. + + Returns: + `bool`: `True` if the file passed is a binary file, `False` otherwise. + """ + try: + with open(filename, "rb") as f: + content = f.read(10 * (1024**2)) # Read a maximum of 10MB + + # Code sample taken from the following stack overflow thread + # https://stackoverflow.com/questions/898669/how-can-i-detect-if-a-file-is-binary-non-text-in-python/7392391#7392391 + text_chars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7F}) + return bool(content.translate(None, text_chars)) + except UnicodeDecodeError: + return True + + +def files_to_be_staged(pattern: str = ".", folder: Union[str, Path, None] = None) -> List[str]: + """ + Returns a list of filenames that are to be staged. + + Args: + pattern (`str` or `Path`): + The pattern of filenames to check. Put `.` to get all files. + folder (`str` or `Path`): + The folder in which to run the command. + + Returns: + `List[str]`: List of files that are to be staged. + """ + try: + p = run_subprocess("git ls-files --exclude-standard -mo".split() + [pattern], folder) + if len(p.stdout.strip()): + files = p.stdout.strip().split("\n") + else: + files = [] + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + return files + + +def is_tracked_upstream(folder: Union[str, Path]) -> bool: + """ + Check if the current checked-out branch is tracked upstream. + + Args: + folder (`str` or `Path`): + The folder in which to run the command. + + Returns: + `bool`: `True` if the current checked-out branch is tracked upstream, + `False` otherwise. + """ + try: + run_subprocess("git rev-parse --symbolic-full-name --abbrev-ref @{u}", folder) + return True + except subprocess.CalledProcessError as exc: + if "HEAD" in exc.stderr: + raise OSError("No branch checked out") + + return False + + +def commits_to_push(folder: Union[str, Path], upstream: Optional[str] = None) -> int: + """ + Check the number of commits that would be pushed upstream + + Args: + folder (`str` or `Path`): + The folder in which to run the command. 
+ upstream (`str`, *optional*): + The name of the upstream repository with which the comparison should be + made. + + Returns: + `int`: Number of commits that would be pushed upstream were a `git + push` to proceed. + """ + try: + result = run_subprocess(f"git cherry -v {upstream or ''}", folder) + return len(result.stdout.split("\n")) - 1 + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + +class PbarT(TypedDict): + # Used to store an opened progress bar in `_lfs_log_progress` + bar: tqdm + past_bytes: int + + +@contextmanager +def _lfs_log_progress(): + """ + This is a context manager that will log the Git LFS progress of cleaning, + smudging, pulling and pushing. + """ + + if logger.getEffectiveLevel() >= logging.ERROR: + try: + yield + except Exception: + pass + return + + def output_progress(stopping_event: threading.Event): + """ + To be launched as a separate thread with an event meaning it should stop + the tail. + """ + # Key is tuple(state, filename), value is a dict(tqdm bar and a previous value) + pbars: Dict[Tuple[str, str], PbarT] = {} + + def close_pbars(): + for pbar in pbars.values(): + pbar["bar"].update(pbar["bar"].total - pbar["past_bytes"]) + pbar["bar"].refresh() + pbar["bar"].close() + + def tail_file(filename) -> Iterator[str]: + """ + Creates a generator to be iterated through, which will return each + line one by one. Will stop tailing the file if the stopping_event is + set. + """ + with open(filename, "r") as file: + current_line = "" + while True: + if stopping_event.is_set(): + close_pbars() + break + + line_bit = file.readline() + if line_bit is not None and not len(line_bit.strip()) == 0: + current_line += line_bit + if current_line.endswith("\n"): + yield current_line + current_line = "" + else: + time.sleep(1) + + # If the file isn't created yet, wait for a few seconds before trying again. + # Can be interrupted with the stopping_event. + while not os.path.exists(os.environ["GIT_LFS_PROGRESS"]): + if stopping_event.is_set(): + close_pbars() + return + + time.sleep(2) + + for line in tail_file(os.environ["GIT_LFS_PROGRESS"]): + try: + state, file_progress, byte_progress, filename = line.split() + except ValueError as error: + # Try/except to ease debugging. See https://github.com/huggingface/huggingface_hub/issues/1373. 
+ raise ValueError(f"Cannot unpack LFS progress line:\n{line}") from error + description = f"{state.capitalize()} file {filename}" + + current_bytes, total_bytes = byte_progress.split("/") + current_bytes_int = int(current_bytes) + total_bytes_int = int(total_bytes) + + pbar = pbars.get((state, filename)) + if pbar is None: + # Initialize progress bar + pbars[(state, filename)] = { + "bar": tqdm( + desc=description, + initial=current_bytes_int, + total=total_bytes_int, + unit="B", + unit_scale=True, + unit_divisor=1024, + name="huggingface_hub.lfs_upload", + ), + "past_bytes": int(current_bytes), + } + else: + # Update progress bar + pbar["bar"].update(current_bytes_int - pbar["past_bytes"]) + pbar["past_bytes"] = current_bytes_int + + current_lfs_progress_value = os.environ.get("GIT_LFS_PROGRESS", "") + + with SoftTemporaryDirectory() as tmpdir: + os.environ["GIT_LFS_PROGRESS"] = os.path.join(tmpdir, "lfs_progress") + logger.debug(f"Following progress in {os.environ['GIT_LFS_PROGRESS']}") + + exit_event = threading.Event() + x = threading.Thread(target=output_progress, args=(exit_event,), daemon=True) + x.start() + + try: + yield + finally: + exit_event.set() + x.join() + + os.environ["GIT_LFS_PROGRESS"] = current_lfs_progress_value + + +class Repository: + """ + Helper class to wrap the git and git-lfs commands. + + The aim is to facilitate interacting with huggingface.co hosted model or + dataset repos, though not a lot here (if any) is actually specific to + huggingface.co. + + > [!WARNING] + > [`Repository`] is deprecated in favor of the http-based alternatives implemented in + > [`HfApi`]. Given its large adoption in legacy code, the complete removal of + > [`Repository`] will only happen in release `v1.0`. For more details, please read + > https://huggingface.co/docs/huggingface_hub/concepts/git_vs_http. + """ + + command_queue: List[CommandInProgress] + + @validate_hf_hub_args + @_deprecate_method( + version="1.0", + message=( + "Please prefer the http-based alternatives instead. Given its large adoption in legacy code, the complete" + " removal is only planned on next major release.\nFor more details, please read" + " https://huggingface.co/docs/huggingface_hub/concepts/git_vs_http." + ), + ) + def __init__( + self, + local_dir: Union[str, Path], + clone_from: Optional[str] = None, + repo_type: Optional[str] = None, + token: Union[bool, str] = True, + git_user: Optional[str] = None, + git_email: Optional[str] = None, + revision: Optional[str] = None, + skip_lfs_files: bool = False, + client: Optional[HfApi] = None, + ): + """ + Instantiate a local clone of a git repo. + + If `clone_from` is set, the repo will be cloned from an existing remote repository. + If the remote repo does not exist, a `EnvironmentError` exception will be thrown. + Please create the remote repo first using [`create_repo`]. + + `Repository` uses the local git credentials by default. If explicitly set, the `token` + or the `git_user`/`git_email` pair will be used instead. + + Args: + local_dir (`str` or `Path`): + path (e.g. `'my_trained_model/'`) to the local directory, where + the `Repository` will be initialized. + clone_from (`str`, *optional*): + Either a repository url or `repo_id`. + Example: + - `"https://huggingface.co/philschmid/playground-tests"` + - `"philschmid/playground-tests"` + repo_type (`str`, *optional*): + To set when cloning a repo from a repo_id. Default is model. + token (`bool` or `str`, *optional*): + A valid authentication token (see https://huggingface.co/settings/token). 
+ If `None` or `True` and machine is logged in (through `hf auth login` + or [`~huggingface_hub.login`]), token will be retrieved from the cache. + If `False`, token is not sent in the request header. + git_user (`str`, *optional*): + will override the `git config user.name` for committing and + pushing files to the hub. + git_email (`str`, *optional*): + will override the `git config user.email` for committing and + pushing files to the hub. + revision (`str`, *optional*): + Revision to checkout after initializing the repository. If the + revision doesn't exist, a branch will be created with that + revision name from the default branch's current HEAD. + skip_lfs_files (`bool`, *optional*, defaults to `False`): + whether to skip git-LFS files or not. + client (`HfApi`, *optional*): + Instance of [`HfApi`] to use when calling the HF Hub API. A new + instance will be created if this is left to `None`. + + Raises: + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + If the remote repository set in `clone_from` does not exist. + """ + if isinstance(local_dir, Path): + local_dir = str(local_dir) + os.makedirs(local_dir, exist_ok=True) + self.local_dir = os.path.join(os.getcwd(), local_dir) + self._repo_type = repo_type + self.command_queue = [] + self.skip_lfs_files = skip_lfs_files + self.client = client if client is not None else HfApi() + + self.check_git_versions() + + if isinstance(token, str): + self.huggingface_token: Optional[str] = token + elif token is False: + self.huggingface_token = None + else: + # if `True` -> explicit use of the cached token + # if `None` -> implicit use of the cached token + self.huggingface_token = get_token() + + if clone_from is not None: + self.clone_from(repo_url=clone_from) + else: + if is_git_repo(self.local_dir): + logger.debug("[Repository] is a valid git repo") + else: + raise ValueError("If not specifying `clone_from`, you need to pass Repository a valid git clone.") + + if self.huggingface_token is not None and (git_email is None or git_user is None): + user = self.client.whoami(self.huggingface_token) + + if git_email is None: + git_email = user.get("email") + + if git_user is None: + git_user = user.get("fullname") + + if git_user is not None or git_email is not None: + self.git_config_username_and_email(git_user, git_email) + + self.lfs_enable_largefiles() + self.git_credential_helper_store() + + if revision is not None: + self.git_checkout(revision, create_branch_ok=True) + + # This ensures that all commands exit before exiting the Python runtime. + # This will ensure all pushes register on the hub, even if other errors happen in subsequent operations. + atexit.register(self.wait_for_commands) + + @property + def current_branch(self) -> str: + """ + Returns the current checked out branch. + + Returns: + `str`: Current checked out branch. + """ + try: + result = run_subprocess("git rev-parse --abbrev-ref HEAD", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + return result + + def check_git_versions(self): + """ + Checks that `git` and `git-lfs` can be run. + + Raises: + [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + If `git` or `git-lfs` are not installed. 
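+
+        Example (illustrative sketch; `repo` is assumed to be an existing `Repository`):
+
+        ```python
+        >>> repo.check_git_versions()  # logs the detected `git` and `git-lfs` versions
+        ```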
+ """ + try: + git_version = run_subprocess("git --version", self.local_dir).stdout.strip() + except FileNotFoundError: + raise EnvironmentError("Looks like you do not have git installed, please install.") + + try: + lfs_version = run_subprocess("git-lfs --version", self.local_dir).stdout.strip() + except FileNotFoundError: + raise EnvironmentError( + "Looks like you do not have git-lfs installed, please install." + " You can install from https://git-lfs.github.com/." + " Then run `git lfs install` (you only have to do this once)." + ) + logger.info(git_version + "\n" + lfs_version) + + @validate_hf_hub_args + def clone_from(self, repo_url: str, token: Union[bool, str, None] = None): + """ + Clone from a remote. If the folder already exists, will try to clone the + repository within it. + + If this folder is a git repository with linked history, will try to + update the repository. + + Args: + repo_url (`str`): + The URL from which to clone the repository + token (`Union[str, bool]`, *optional*): + Whether to use the authentication token. It can be: + - a string which is the token itself + - `False`, which would not use the authentication token + - `True`, which would fetch the authentication token from the + local folder and use it (you should be logged in for this to + work). + - `None`, which would retrieve the value of + `self.huggingface_token`. + + > [!TIP] + > Raises the following error: + > + > - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) + > if an organization token (starts with "api_org") is passed. Use must use + > your own personal access token (see https://hf.co/settings/tokens). + > + > - [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) + > if you are trying to clone the repository in a non-empty folder, or if the + > `git` operations raise errors. + """ + token = ( + token # str -> use it + if isinstance(token, str) + else ( + None # `False` -> explicit no token + if token is False + else self.huggingface_token # `None` or `True` -> use default + ) + ) + if token is not None and token.startswith("api_org"): + raise ValueError( + "You must use your personal access token, not an Organization token" + " (see https://hf.co/settings/tokens)." + ) + + hub_url = self.client.endpoint + if hub_url in repo_url or ("http" not in repo_url and len(repo_url.split("/")) <= 2): + repo_type, namespace, repo_name = repo_type_and_id_from_hf_id(repo_url, hub_url=hub_url) + repo_id = f"{namespace}/{repo_name}" if namespace is not None else repo_name + + if repo_type is not None: + self._repo_type = repo_type + + repo_url = hub_url + "/" + + if self._repo_type in constants.REPO_TYPES_URL_PREFIXES: + repo_url += constants.REPO_TYPES_URL_PREFIXES[self._repo_type] + + if token is not None: + # Add token in git url when provided + scheme = urlparse(repo_url).scheme + repo_url = repo_url.replace(f"{scheme}://", f"{scheme}://user:{token}@") + + repo_url += repo_id + + # For error messages, it's cleaner to show the repo url without the token. 
+        clean_repo_url = re.sub(r"(https?)://.*@", r"\1://", repo_url)
+        try:
+            run_subprocess("git lfs install", self.local_dir)
+
+            # checks if the repository is being cloned into an empty directory or into one with existing files
+            if len(os.listdir(self.local_dir)) == 0:
+                logger.warning(f"Cloning {clean_repo_url} into local empty directory.")
+
+                with _lfs_log_progress():
+                    env = os.environ.copy()
+
+                    if self.skip_lfs_files:
+                        env.update({"GIT_LFS_SKIP_SMUDGE": "1"})
+
+                    run_subprocess(
+                        # 'git lfs clone' is deprecated (will display a warning in the terminal)
+                        # but we still use it as it provides a nicer UX when downloading large
+                        # files (shows progress).
+                        f"{'git clone' if self.skip_lfs_files else 'git lfs clone'} {repo_url} .",
+                        self.local_dir,
+                        env=env,
+                    )
+            else:
+                # Check if the folder is the root of a git repository
+                if not is_git_repo(self.local_dir):
+                    raise EnvironmentError(
+                        "Tried to clone a repository in a non-empty folder that isn't"
+                        f" a git repository ('{self.local_dir}'). If you really want to"
+                        f" do this, do it manually:\n cd {self.local_dir} && git init"
+                        " && git remote add origin && git pull origin main\n or clone"
+                        " repo to a new folder and move your existing files there"
+                        " afterwards."
+                    )
+
+                if is_local_clone(self.local_dir, repo_url):
+                    logger.warning(
+                        f"{self.local_dir} is already a clone of {clean_repo_url}."
+                        " Make sure you pull the latest changes with"
+                        " `repo.git_pull()`."
+                    )
+                else:
+                    output = run_subprocess("git remote get-url origin", self.local_dir, check=False)
+
+                    error_msg = (
+                        f"Tried to clone {clean_repo_url} in an unrelated git"
+                        " repository.\nIf you believe this is an error, please add"
+                        f" a remote with the following URL: {clean_repo_url}."
+                    )
+                    if output.returncode == 0:
+                        clean_local_remote_url = re.sub(r"https://.*@", "https://", output.stdout)
+                        error_msg += f"\nLocal path has its origin defined as: {clean_local_remote_url}"
+                    raise EnvironmentError(error_msg)
+
+        except subprocess.CalledProcessError as exc:
+            raise EnvironmentError(exc.stderr)
+
+    def git_config_username_and_email(self, git_user: Optional[str] = None, git_email: Optional[str] = None):
+        """
+        Sets git username and email (only in the current repo).
+
+        Args:
+            git_user (`str`, *optional*):
+                The username to register through `git`.
+            git_email (`str`, *optional*):
+                The email to register through `git`.
+        """
+        try:
+            if git_user is not None:
+                run_subprocess("git config user.name".split() + [git_user], self.local_dir)
+
+            if git_email is not None:
+                run_subprocess(f"git config user.email {git_email}".split(), self.local_dir)
+        except subprocess.CalledProcessError as exc:
+            raise EnvironmentError(exc.stderr)
+
+    def git_credential_helper_store(self):
+        """
+        Sets the git credential helper to `store`
+        """
+        try:
+            run_subprocess("git config credential.helper store", self.local_dir)
+        except subprocess.CalledProcessError as exc:
+            raise EnvironmentError(exc.stderr)
+
+    def git_head_hash(self) -> str:
+        """
+        Get commit sha on top of HEAD.
+
+        Returns:
+            `str`: The current checked out commit SHA.
+        """
+        try:
+            p = run_subprocess("git rev-parse HEAD", self.local_dir)
+            return p.stdout.strip()
+        except subprocess.CalledProcessError as exc:
+            raise EnvironmentError(exc.stderr)
+
+    def git_remote_url(self) -> str:
+        """
+        Get URL to origin remote.
+
+        Returns:
+            `str`: The URL of the `origin` remote.
+        """
+        try:
+            p = run_subprocess("git config --get remote.origin.url", self.local_dir)
+            url = p.stdout.strip()
+            # Strip basic auth info.
+ return re.sub(r"https://.*@", "https://", url) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def git_head_commit_url(self) -> str: + """ + Get URL to last commit on HEAD. We assume it's been pushed, and the url + scheme is the same one as for GitHub or HuggingFace. + + Returns: + `str`: The URL to the current checked-out commit. + """ + sha = self.git_head_hash() + url = self.git_remote_url() + if url.endswith("/"): + url = url[:-1] + return f"{url}/commit/{sha}" + + def list_deleted_files(self) -> List[str]: + """ + Returns a list of the files that are deleted in the working directory or + index. + + Returns: + `List[str]`: A list of files that have been deleted in the working + directory or index. + """ + try: + git_status = run_subprocess("git status -s", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + if len(git_status) == 0: + return [] + + # Receives a status like the following + # D .gitignore + # D new_file.json + # AD new_file1.json + # ?? new_file2.json + # ?? new_file4.json + + # Strip each line of whitespaces + modified_files_statuses = [status.strip() for status in git_status.split("\n")] + + # Only keep files that are deleted using the D prefix + deleted_files_statuses = [status for status in modified_files_statuses if "D" in status.split()[0]] + + # Remove the D prefix and strip to keep only the relevant filename + deleted_files = [status.split()[-1].strip() for status in deleted_files_statuses] + + return deleted_files + + def lfs_track(self, patterns: Union[str, List[str]], filename: bool = False): + """ + Tell git-lfs to track files according to a pattern. + + Setting the `filename` argument to `True` will treat the arguments as + literal filenames, not as patterns. Any special glob characters in the + filename will be escaped when writing to the `.gitattributes` file. + + Args: + patterns (`Union[str, List[str]]`): + The pattern, or list of patterns, to track with git-lfs. + filename (`bool`, *optional*, defaults to `False`): + Whether to use the patterns as literal filenames. + """ + if isinstance(patterns, str): + patterns = [patterns] + try: + for pattern in patterns: + run_subprocess( + f"git lfs track {'--filename' if filename else ''} {pattern}", + self.local_dir, + ) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def lfs_untrack(self, patterns: Union[str, List[str]]): + """ + Tell git-lfs to untrack those files. + + Args: + patterns (`Union[str, List[str]]`): + The pattern, or list of patterns, to untrack with git-lfs. + """ + if isinstance(patterns, str): + patterns = [patterns] + try: + for pattern in patterns: + run_subprocess("git lfs untrack".split() + [pattern], self.local_dir) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def lfs_enable_largefiles(self): + """ + HF-specific. This enables upload support of files >5GB. + """ + try: + lfs_config = "git config lfs.customtransfer.multipart" + run_subprocess(f"{lfs_config}.path hf", self.local_dir) + run_subprocess( + f"{lfs_config}.args {LFS_MULTIPART_UPLOAD_COMMAND}", + self.local_dir, + ) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def auto_track_binary_files(self, pattern: str = ".") -> List[str]: + """ + Automatically track binary files with git-lfs. + + Args: + pattern (`str`, *optional*, defaults to "."): + The pattern with which to track files that are binary. 
+ + Returns: + `List[str]`: List of filenames that are now tracked due to being + binary files + """ + files_to_be_tracked_with_lfs = [] + + deleted_files = self.list_deleted_files() + + for filename in files_to_be_staged(pattern, folder=self.local_dir): + if filename in deleted_files: + continue + + path_to_file = os.path.join(os.getcwd(), self.local_dir, filename) + + if not (is_tracked_with_lfs(path_to_file) or is_git_ignored(path_to_file)): + size_in_mb = os.path.getsize(path_to_file) / (1024 * 1024) + + if size_in_mb >= 10: + logger.warning( + "Parsing a large file to check if binary or not. Tracking large" + " files using `repository.auto_track_large_files` is" + " recommended so as to not load the full file in memory." + ) + + is_binary = is_binary_file(path_to_file) + + if is_binary: + self.lfs_track(filename) + files_to_be_tracked_with_lfs.append(filename) + + # Cleanup the .gitattributes if files were deleted + self.lfs_untrack(deleted_files) + + return files_to_be_tracked_with_lfs + + def auto_track_large_files(self, pattern: str = ".") -> List[str]: + """ + Automatically track large files (files that weigh more than 10MBs) with + git-lfs. + + Args: + pattern (`str`, *optional*, defaults to "."): + The pattern with which to track files that are above 10MBs. + + Returns: + `List[str]`: List of filenames that are now tracked due to their + size. + """ + files_to_be_tracked_with_lfs = [] + + deleted_files = self.list_deleted_files() + + for filename in files_to_be_staged(pattern, folder=self.local_dir): + if filename in deleted_files: + continue + + path_to_file = os.path.join(os.getcwd(), self.local_dir, filename) + size_in_mb = os.path.getsize(path_to_file) / (1024 * 1024) + + if size_in_mb >= 10 and not is_tracked_with_lfs(path_to_file) and not is_git_ignored(path_to_file): + self.lfs_track(filename) + files_to_be_tracked_with_lfs.append(filename) + + # Cleanup the .gitattributes if files were deleted + self.lfs_untrack(deleted_files) + + return files_to_be_tracked_with_lfs + + def lfs_prune(self, recent=False): + """ + git lfs prune + + Args: + recent (`bool`, *optional*, defaults to `False`): + Whether to prune files even if they were referenced by recent + commits. See the following + [link](https://github.com/git-lfs/git-lfs/blob/f3d43f0428a84fc4f1e5405b76b5a73ec2437e65/docs/man/git-lfs-prune.1.ronn#recent-files) + for more information. + """ + try: + with _lfs_log_progress(): + result = run_subprocess(f"git lfs prune {'--recent' if recent else ''}", self.local_dir) + logger.info(result.stdout) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def git_pull(self, rebase: bool = False, lfs: bool = False): + """ + git pull + + Args: + rebase (`bool`, *optional*, defaults to `False`): + Whether to rebase the current branch on top of the upstream + branch after fetching. + lfs (`bool`, *optional*, defaults to `False`): + Whether to fetch the LFS files too. This option only changes the + behavior when a repository was cloned without fetching the LFS + files; calling `repo.git_pull(lfs=True)` will then fetch the LFS + file from the remote repository. 
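+
+        Example (illustrative sketch; `repo` is assumed to be an existing `Repository`):
+
+        ```python
+        >>> repo.git_pull()  # plain `git pull`
+        >>> repo.git_pull(rebase=True)  # rebase local commits on top of upstream
+        >>> repo.git_pull(lfs=True)  # also fetch LFS files, e.g. after cloning with `skip_lfs_files=True`
+        ```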
+ """ + command = "git pull" if not lfs else "git lfs pull" + if rebase: + command += " --rebase" + try: + with _lfs_log_progress(): + result = run_subprocess(command, self.local_dir) + logger.info(result.stdout) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def git_add(self, pattern: str = ".", auto_lfs_track: bool = False): + """ + git add + + Setting the `auto_lfs_track` parameter to `True` will automatically + track files that are larger than 10MB with `git-lfs`. + + Args: + pattern (`str`, *optional*, defaults to "."): + The pattern with which to add files to staging. + auto_lfs_track (`bool`, *optional*, defaults to `False`): + Whether to automatically track large and binary files with + git-lfs. Any file over 10MB in size, or in binary format, will + be automatically tracked. + """ + if auto_lfs_track: + # Track files according to their size (>=10MB) + tracked_files = self.auto_track_large_files(pattern) + + # Read the remaining files and track them if they're binary + tracked_files.extend(self.auto_track_binary_files(pattern)) + + if tracked_files: + logger.warning( + f"Adding files tracked by Git LFS: {tracked_files}. This may take a" + " bit of time if the files are large." + ) + + try: + result = run_subprocess("git add -v".split() + [pattern], self.local_dir) + logger.info(f"Adding to index:\n{result.stdout}\n") + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def git_commit(self, commit_message: str = "commit files to HF hub"): + """ + git commit + + Args: + commit_message (`str`, *optional*, defaults to "commit files to HF hub"): + The message attributed to the commit. + """ + try: + result = run_subprocess("git commit -v -m".split() + [commit_message], self.local_dir) + logger.info(f"Committed:\n{result.stdout}\n") + except subprocess.CalledProcessError as exc: + if len(exc.stderr) > 0: + raise EnvironmentError(exc.stderr) + else: + raise EnvironmentError(exc.stdout) + + def git_push( + self, + upstream: Optional[str] = None, + blocking: bool = True, + auto_lfs_prune: bool = False, + ) -> Union[str, Tuple[str, CommandInProgress]]: + """ + git push + + If used without setting `blocking`, will return url to commit on remote + repo. If used with `blocking=True`, will return a tuple containing the + url to commit and the command object to follow for information about the + process. + + Args: + upstream (`str`, *optional*): + Upstream to which this should push. If not specified, will push + to the lastly defined upstream or to the default one (`origin + main`). + blocking (`bool`, *optional*, defaults to `True`): + Whether the function should return only when the push has + finished. Setting this to `False` will return an + `CommandInProgress` object which has an `is_done` property. This + property will be set to `True` when the push is finished. + auto_lfs_prune (`bool`, *optional*, defaults to `False`): + Whether to automatically prune files once they have been pushed + to the remote. 
+ """ + command = "git push" + + if upstream: + command += f" --set-upstream {upstream}" + + number_of_commits = commits_to_push(self.local_dir, upstream) + + if number_of_commits > 1: + logger.warning(f"Several commits ({number_of_commits}) will be pushed upstream.") + if blocking: + logger.warning("The progress bars may be unreliable.") + + try: + with _lfs_log_progress(): + process = subprocess.Popen( + command.split(), + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + encoding="utf-8", + cwd=self.local_dir, + ) + + if blocking: + stdout, stderr = process.communicate() + return_code = process.poll() + process.kill() + + if len(stderr): + logger.warning(stderr) + + if return_code: + raise subprocess.CalledProcessError(return_code, process.args, output=stdout, stderr=stderr) + + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + if not blocking: + + def status_method(): + status = process.poll() + if status is None: + return -1 + else: + return status + + command_in_progress = CommandInProgress( + "push", + is_done_method=lambda: process.poll() is not None, + status_method=status_method, + process=process, + post_method=self.lfs_prune if auto_lfs_prune else None, + ) + + self.command_queue.append(command_in_progress) + + return self.git_head_commit_url(), command_in_progress + + if auto_lfs_prune: + self.lfs_prune() + + return self.git_head_commit_url() + + def git_checkout(self, revision: str, create_branch_ok: bool = False): + """ + git checkout a given revision + + Specifying `create_branch_ok` to `True` will create the branch to the + given revision if that revision doesn't exist. + + Args: + revision (`str`): + The revision to checkout. + create_branch_ok (`str`, *optional*, defaults to `False`): + Whether creating a branch named with the `revision` passed at + the current checked-out reference if `revision` isn't an + existing revision is allowed. + """ + try: + result = run_subprocess(f"git checkout {revision}", self.local_dir) + logger.warning(f"Checked out {revision} from {self.current_branch}.") + logger.warning(result.stdout) + except subprocess.CalledProcessError as exc: + if not create_branch_ok: + raise EnvironmentError(exc.stderr) + else: + try: + result = run_subprocess(f"git checkout -b {revision}", self.local_dir) + logger.warning( + f"Revision `{revision}` does not exist. Created and checked out branch `{revision}`." + ) + logger.warning(result.stdout) + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def tag_exists(self, tag_name: str, remote: Optional[str] = None) -> bool: + """ + Check if a tag exists or not. + + Args: + tag_name (`str`): + The name of the tag to check. + remote (`str`, *optional*): + Whether to check if the tag exists on a remote. This parameter + should be the identifier of the remote. + + Returns: + `bool`: Whether the tag exists. 
+ """ + if remote: + try: + result = run_subprocess(f"git ls-remote origin refs/tags/{tag_name}", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + return len(result) != 0 + else: + try: + git_tags = run_subprocess("git tag", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + git_tags = git_tags.split("\n") + return tag_name in git_tags + + def delete_tag(self, tag_name: str, remote: Optional[str] = None) -> bool: + """ + Delete a tag, both local and remote, if it exists + + Args: + tag_name (`str`): + The tag name to delete. + remote (`str`, *optional*): + The remote on which to delete the tag. + + Returns: + `bool`: `True` if deleted, `False` if the tag didn't exist. + If remote is not passed, will just be updated locally + """ + delete_locally = True + delete_remotely = True + + if not self.tag_exists(tag_name): + delete_locally = False + + if not self.tag_exists(tag_name, remote=remote): + delete_remotely = False + + if delete_locally: + try: + run_subprocess(["git", "tag", "-d", tag_name], self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + if remote and delete_remotely: + try: + run_subprocess(f"git push {remote} --delete {tag_name}", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + return True + + def add_tag(self, tag_name: str, message: Optional[str] = None, remote: Optional[str] = None): + """ + Add a tag at the current head and push it + + If remote is None, will just be updated locally + + If no message is provided, the tag will be lightweight. if a message is + provided, the tag will be annotated. + + Args: + tag_name (`str`): + The name of the tag to be added. + message (`str`, *optional*): + The message that accompanies the tag. The tag will turn into an + annotated tag if a message is passed. + remote (`str`, *optional*): + The remote on which to add the tag. + """ + if message: + tag_args = ["git", "tag", "-a", tag_name, "-m", message] + else: + tag_args = ["git", "tag", tag_name] + + try: + run_subprocess(tag_args, self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + if remote: + try: + run_subprocess(f"git push {remote} {tag_name}", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + def is_repo_clean(self) -> bool: + """ + Return whether or not the git status is clean or not + + Returns: + `bool`: `True` if the git status is clean, `False` otherwise. + """ + try: + git_status = run_subprocess("git status --porcelain", self.local_dir).stdout.strip() + except subprocess.CalledProcessError as exc: + raise EnvironmentError(exc.stderr) + + return len(git_status) == 0 + + def push_to_hub( + self, + commit_message: str = "commit files to HF hub", + blocking: bool = True, + clean_ok: bool = True, + auto_lfs_prune: bool = False, + ) -> Union[None, str, Tuple[str, CommandInProgress]]: + """ + Helper to add, commit, and push files to remote repository on the + HuggingFace Hub. Will automatically track large files (>10MB). + + Args: + commit_message (`str`): + Message to use for the commit. + blocking (`bool`, *optional*, defaults to `True`): + Whether the function should return only when the `git push` has + finished. 
+            clean_ok (`bool`, *optional*, defaults to `True`):
+                If `True`, this function will return `None` if the repo is
+                untouched. Without it, the call would fail because the
+                underlying git command fails on an empty commit.
+            auto_lfs_prune (`bool`, *optional*, defaults to `False`):
+                Whether to automatically prune files once they have been pushed
+                to the remote.
+        """
+        if clean_ok and self.is_repo_clean():
+            logger.info("Repo currently clean. Ignoring push_to_hub")
+            return None
+        self.git_add(auto_lfs_track=True)
+        self.git_commit(commit_message)
+        return self.git_push(
+            upstream=f"origin {self.current_branch}",
+            blocking=blocking,
+            auto_lfs_prune=auto_lfs_prune,
+        )
+
+    @contextmanager
+    def commit(
+        self,
+        commit_message: str,
+        branch: Optional[str] = None,
+        track_large_files: bool = True,
+        blocking: bool = True,
+        auto_lfs_prune: bool = False,
+    ):
+        """
+        Context manager utility to handle committing to a repository. This
+        automatically tracks large files (>10MB) with git-lfs. Set the
+        `track_large_files` argument to `False` if you wish to ignore that
+        behavior.
+
+        Args:
+            commit_message (`str`):
+                Message to use for the commit.
+            branch (`str`, *optional*):
+                The branch on which the commit will appear. This branch will be
+                checked-out before any operation.
+            track_large_files (`bool`, *optional*, defaults to `True`):
+                Whether to automatically track large files or not. Will do so by
+                default.
+            blocking (`bool`, *optional*, defaults to `True`):
+                Whether the function should return only when the `git push` has
+                finished.
+            auto_lfs_prune (`bool`, *optional*, defaults to `False`):
+                Whether to automatically prune files once they have been pushed
+                to the remote.
+
+        Examples:
+
+        ```python
+        >>> with Repository(
+        ...     "text-files",
+        ...     clone_from="<user>/text-files",
+        ...     token=True,
+        ... ).commit("My first file :)"):
+        ...     with open("file.txt", "w+") as f:
+        ...         f.write(json.dumps({"hey": 8}))
+
+        >>> import torch
+
+        >>> model = torch.nn.Transformer()
+        >>> with Repository(
+        ...     "torch-model",
+        ...     clone_from="<user>/torch-model",
+        ...     token=True,
+        ... ).commit("My cool model :)"):
+        ...     torch.save(model.state_dict(), "model.pt")
+        ```
+
+        """
+
+        files_to_stage = files_to_be_staged(".", folder=self.local_dir)
+
+        if len(files_to_stage):
+            files_in_msg = str(files_to_stage[:5])[:-1] + ", ...]" if len(files_to_stage) > 5 else str(files_to_stage)
+            logger.error(
+                "There exist updated files in the local repository that are not"
+                f" committed: {files_in_msg}. This may lead to errors if checking out"
+                " a branch. These files and their modifications will be added to the"
+                " current commit."
+            )
+
+        if branch is not None:
+            self.git_checkout(branch, create_branch_ok=True)
+
+        if is_tracked_upstream(self.local_dir):
+            logger.warning("Pulling changes ...")
+            self.git_pull(rebase=True)
+        else:
+            logger.warning(f"The current branch has no upstream branch. Will push to 'origin {self.current_branch}'")
+
+        current_working_directory = os.getcwd()
+        os.chdir(os.path.join(current_working_directory, self.local_dir))
+
+        try:
+            yield self
+        finally:
+            self.git_add(auto_lfs_track=track_large_files)
+
+            try:
+                self.git_commit(commit_message)
+            except OSError as e:
+                # If no changes are detected, there is nothing to commit.
+                if "nothing to commit" not in str(e):
+                    raise e
+
+            try:
+                self.git_push(
+                    upstream=f"origin {self.current_branch}",
+                    blocking=blocking,
+                    auto_lfs_prune=auto_lfs_prune,
+                )
+            except OSError as e:
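+                # git typically prints "could not read Username" when it needs
+                # credentials and neither a terminal prompt nor a credential
+                # helper can supply them, so translate it into an actionable
+                # authentication error below.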
+ if "could not read Username" in str(e): + raise OSError("Couldn't authenticate user for push. Did you set `token` to `True`?") from e + else: + raise e + + os.chdir(current_working_directory) + + def repocard_metadata_load(self) -> Optional[Dict]: + filepath = os.path.join(self.local_dir, constants.REPOCARD_NAME) + if os.path.isfile(filepath): + return metadata_load(filepath) + return None + + def repocard_metadata_save(self, data: Dict) -> None: + return metadata_save(os.path.join(self.local_dir, constants.REPOCARD_NAME), data) + + @property + def commands_failed(self): + """ + Returns the asynchronous commands that failed. + """ + return [c for c in self.command_queue if c.status > 0] + + @property + def commands_in_progress(self): + """ + Returns the asynchronous commands that are currently in progress. + """ + return [c for c in self.command_queue if not c.is_done] + + def wait_for_commands(self): + """ + Blocking method: blocks all subsequent execution until all commands have + been processed. + """ + index = 0 + for command_failed in self.commands_failed: + logger.error(f"The {command_failed.title} command with PID {command_failed._process.pid} failed.") + logger.error(command_failed.stderr) + + while self.commands_in_progress: + if index % 10 == 0: + logger.warning( + f"Waiting for the following commands to finish before shutting down: {self.commands_in_progress}." + ) + + index += 1 + + time.sleep(1) diff --git a/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/METADATA b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..10b290a6cd1770506ae281a5fa469f6c110c364c --- /dev/null +++ b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/METADATA @@ -0,0 +1,105 @@ +Metadata-Version: 2.4 +Name: packaging +Version: 25.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. 
+
+This library provides utilities that implement the interoperability
+specifications which have one clearly correct behaviour (e.g. :pep:`440`)
+or benefit greatly from having a single shared implementation (e.g. :pep:`425`).
+
+.. end-intro
+
+The ``packaging`` project includes the following: version handling, specifiers,
+markers, requirements, tags, utilities.
+
+Documentation
+-------------
+
+The `documentation`_ provides information and the API for the following:
+
+- Version Handling
+- Specifiers
+- Markers
+- Requirements
+- Tags
+- Utilities
+
+Installation
+------------
+
+Use ``pip`` to install these utilities::
+
+    pip install packaging
+
+The ``packaging`` library uses calendar-based versioning (``YY.N``).
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Libera.Chat to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+
+Code of Conduct
+---------------
+
+Everyone interacting in the packaging project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+Contributing
+------------
+
+The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
+well as how to report a potential security issue. The documentation for this
+project also covers information about `project development`_ and `security`_.
+
+.. _`project development`: https://packaging.pypa.io/en/latest/development/
+.. _`security`: https://packaging.pypa.io/en/latest/security/
+
+Project History
+---------------
+
+Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
+recent changes and project history.
+
+..
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/RECORD b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ca1a90f43b8dde941e1bca91029c2263423f9506 --- /dev/null +++ b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/RECORD @@ -0,0 +1,40 @@ +packaging-25.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +packaging-25.0.dist-info/METADATA,sha256=W2EaYJw4_vw9YWv0XSCuyY-31T8kXayp4sMPyFx6woI,3281 +packaging-25.0.dist-info/RECORD,, +packaging-25.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +packaging-25.0.dist-info/licenses/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-25.0.dist-info/licenses/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-25.0.dist-info/licenses/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging/__init__.py,sha256=_0cDiPVf2S-bNfVmZguxxzmrIYWlyASxpqph4qsJWUc,494 +packaging/__pycache__/__init__.cpython-313.pyc,, +packaging/__pycache__/_elffile.cpython-313.pyc,, +packaging/__pycache__/_manylinux.cpython-313.pyc,, +packaging/__pycache__/_musllinux.cpython-313.pyc,, +packaging/__pycache__/_parser.cpython-313.pyc,, +packaging/__pycache__/_structures.cpython-313.pyc,, +packaging/__pycache__/_tokenizer.cpython-313.pyc,, +packaging/__pycache__/markers.cpython-313.pyc,, +packaging/__pycache__/metadata.cpython-313.pyc,, +packaging/__pycache__/requirements.cpython-313.pyc,, +packaging/__pycache__/specifiers.cpython-313.pyc,, +packaging/__pycache__/tags.cpython-313.pyc,, +packaging/__pycache__/utils.cpython-313.pyc,, +packaging/__pycache__/version.cpython-313.pyc,, +packaging/_elffile.py,sha256=UkrbDtW7aeq3qqoAfU16ojyHZ1xsTvGke_WqMTKAKd0,3286 +packaging/_manylinux.py,sha256=t4y_-dTOcfr36gLY-ztiOpxxJFGO2ikC11HgfysGxiM,9596 +packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694 +packaging/_parser.py,sha256=gYfnj0pRHflVc4RHZit13KNTyN9iiVcU2RUCGi22BwM,10221 +packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +packaging/_tokenizer.py,sha256=OYzt7qKxylOAJ-q0XyK1qAycyPRYLfMPdGQKRXkZWyI,5310 +packaging/licenses/__init__.py,sha256=VsK4o27CJXWfTi8r2ybJmsBoCdhpnBWuNrskaCVKP7U,5715 +packaging/licenses/__pycache__/__init__.cpython-313.pyc,, +packaging/licenses/__pycache__/_spdx.cpython-313.pyc,, +packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398 +packaging/markers.py,sha256=P0we27jm1xUzgGMJxBjtUFCIWeBxTsMeJTOJ6chZmAY,12049 +packaging/metadata.py,sha256=8IZErqQQnNm53dZZuYq4FGU4_dpyinMeH1QFBIWIkfE,34739 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947 +packaging/specifiers.py,sha256=gtPu5DTc-F9baLq3FTGEK6dPhHGCuwwZetaY0PSV2gs,40055 +packaging/tags.py,sha256=41s97W9Zatrq2Ed7Rc3qeBDaHe8pKKvYq2mGjwahfXk,22745 +packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050 +packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676 diff --git a/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/WHEEL b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d8b9936dad9ab2513fa6979f411560d3b6b57e37 --- /dev/null +++ 
b/venv/lib/python3.13/site-packages/packaging-25.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/METADATA b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..2cf904bc205ba3bfbbd5ab65edeab062bddcb609
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/METADATA
@@ -0,0 +1,112 @@
+Metadata-Version: 2.4
+Name: pip
+Version: 25.2
+Summary: The PyPA recommended tool for installing Python packages.
+Author-email: The pip developers <distutils-sig@python.org>
+License-Expression: MIT
+Project-URL: Homepage, https://pip.pypa.io/
+Project-URL: Documentation, https://pip.pypa.io
+Project-URL: Source, https://github.com/pypa/pip
+Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: AUTHORS.txt
+License-File: LICENSE.txt
+License-File: src/pip/_vendor/msgpack/COPYING
+License-File: src/pip/_vendor/cachecontrol/LICENSE.txt
+License-File: src/pip/_vendor/certifi/LICENSE
+License-File: src/pip/_vendor/dependency_groups/LICENSE.txt
+License-File: src/pip/_vendor/distlib/LICENSE.txt
+License-File: src/pip/_vendor/distro/LICENSE
+License-File: src/pip/_vendor/idna/LICENSE.md
+License-File: src/pip/_vendor/packaging/LICENSE
+License-File: src/pip/_vendor/packaging/LICENSE.APACHE
+License-File: src/pip/_vendor/packaging/LICENSE.BSD
+License-File: src/pip/_vendor/pkg_resources/LICENSE
+License-File: src/pip/_vendor/platformdirs/LICENSE
+License-File: src/pip/_vendor/pygments/LICENSE
+License-File: src/pip/_vendor/pyproject_hooks/LICENSE
+License-File: src/pip/_vendor/requests/LICENSE
+License-File: src/pip/_vendor/resolvelib/LICENSE
+License-File: src/pip/_vendor/rich/LICENSE
+License-File: src/pip/_vendor/tomli/LICENSE
+License-File: src/pip/_vendor/tomli/LICENSE-HEADER
+License-File: src/pip/_vendor/tomli_w/LICENSE
+License-File: src/pip/_vendor/truststore/LICENSE
+License-File: src/pip/_vendor/urllib3/LICENSE.txt
+Dynamic: license-file
+
+pip - The Python Package Installer
+==================================
+
+.. |pypi-version| image:: https://img.shields.io/pypi/v/pip.svg
+   :target: https://pypi.org/project/pip/
+   :alt: PyPI
+
+.. |python-versions| image:: https://img.shields.io/pypi/pyversions/pip
+   :target: https://pypi.org/project/pip
+   :alt: PyPI - Python Version
+
+.. |docs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest
+   :target: https://pip.pypa.io/en/latest
+   :alt: Documentation
+
+|pypi-version| |python-versions| |docs-badge|
+
+pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
+
+Please take a look at our documentation for how to install and use pip:
+
+* `Installation`_
+* `Usage`_
+
+We release updates regularly, with a new version every 3 months. Find more details in our documentation:
+
+* `Release notes`_
+* `Release process`_
+
+If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
+
+* `Issue tracking`_
+* `Discourse channel`_
+* `User IRC`_
+
+If you want to get involved, head over to GitHub to get the source code, look at our development documentation, and feel free to jump on the developer mailing lists and chat rooms:
+
+* `GitHub page`_
+* `Development documentation`_
+* `Development IRC`_
+
+Code of Conduct
+---------------
+
+Everyone interacting in the pip project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _package installer: https://packaging.python.org/guides/tool-recommendations/
+.. _Python Package Index: https://pypi.org
+.. _Installation: https://pip.pypa.io/en/stable/installation/
+.. _Usage: https://pip.pypa.io/en/stable/
+.. _Release notes: https://pip.pypa.io/en/stable/news.html
+.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
+.. _GitHub page: https://github.com/pypa/pip
+.. _Development documentation: https://pip.pypa.io/en/latest/development
+.. _Issue tracking: https://github.com/pypa/pip/issues
+.. _Discourse channel: https://discuss.python.org/c/packaging
+.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
+.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
+..
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/RECORD b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..e38b62dd56e609968739e4f016f140560ac897c4 --- /dev/null +++ b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/RECORD @@ -0,0 +1,854 @@ +../../../bin/pip,sha256=-bj5D2jPR_C50EwQfPbzi8VOhmIJ9IqkD0V8UYyrksM,213 +../../../bin/pip3,sha256=-bj5D2jPR_C50EwQfPbzi8VOhmIJ9IqkD0V8UYyrksM,213 +../../../bin/pip3.13,sha256=-bj5D2jPR_C50EwQfPbzi8VOhmIJ9IqkD0V8UYyrksM,213 +pip-25.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-25.2.dist-info/METADATA,sha256=l6OtFNcf2JFKOPJK9bMaH5-usFrqZqSvQNl51tetIp8,4744 +pip-25.2.dist-info/RECORD,, +pip-25.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip-25.2.dist-info/WHEEL,sha256=lTU6B6eIfYoiQJTZNc-fyaR6BpL6ehTzU3xGYxn2n8k,91 +pip-25.2.dist-info/entry_points.txt,sha256=eeIjuzfnfR2PrhbjnbzFU6MnSS70kZLxwaHHq6M-bD0,87 +pip-25.2.dist-info/licenses/AUTHORS.txt,sha256=ioEfMGkkizcTcIPdvjh-kYymO1E9I9dvzHjLUlKS5m8,11385 +pip-25.2.dist-info/licenses/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093 +pip-25.2.dist-info/licenses/src/pip/_vendor/cachecontrol/LICENSE.txt,sha256=hu7uh74qQ_P_H1ZJb0UfaSQ5JvAl_tuwM2ZsMExMFhs,558 +pip-25.2.dist-info/licenses/src/pip/_vendor/certifi/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +pip-25.2.dist-info/licenses/src/pip/_vendor/dependency_groups/LICENSE.txt,sha256=GrNuPipLqGMWJThPh-ngkdsfrtA0xbIzJbMjmr8sxSU,1099 +pip-25.2.dist-info/licenses/src/pip/_vendor/distlib/LICENSE.txt,sha256=gI4QyKarjesUn_mz-xn0R6gICUYG1xKpylf-rTVSWZ0,14531 +pip-25.2.dist-info/licenses/src/pip/_vendor/distro/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 +pip-25.2.dist-info/licenses/src/pip/_vendor/idna/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 +pip-25.2.dist-info/licenses/src/pip/_vendor/msgpack/COPYING,sha256=SS3tuoXaWHL3jmCRvNH-pHTWYNNay03ulkuKqz8AdCc,614 +pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +pip-25.2.dist-info/licenses/src/pip/_vendor/pkg_resources/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023 +pip-25.2.dist-info/licenses/src/pip/_vendor/platformdirs/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089 +pip-25.2.dist-info/licenses/src/pip/_vendor/pygments/LICENSE,sha256=qdZvHVJt8C4p3Oc0NtNOVuhjL0bCdbvf_HBWnogvnxc,1331 +pip-25.2.dist-info/licenses/src/pip/_vendor/pyproject_hooks/LICENSE,sha256=GyKwSbUmfW38I6Z79KhNjsBLn9-xpR02DkK0NCyLQVQ,1081 +pip-25.2.dist-info/licenses/src/pip/_vendor/requests/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142 +pip-25.2.dist-info/licenses/src/pip/_vendor/resolvelib/LICENSE,sha256=84j9OMrRMRLB3A9mm76A5_hFQe26-3LzAw0sp2QsPJ0,751 +pip-25.2.dist-info/licenses/src/pip/_vendor/rich/LICENSE,sha256=3u18F6QxgVgZCj6iOcyHmlpQJxzruYrnAl9I--WNyhU,1056 +pip-25.2.dist-info/licenses/src/pip/_vendor/tomli/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072 
+pip-25.2.dist-info/licenses/src/pip/_vendor/tomli/LICENSE-HEADER,sha256=l86TMJBaFy3ehw7gNh2Jvrlbo70PRUV5aqkajAGkNTE,121 +pip-25.2.dist-info/licenses/src/pip/_vendor/tomli_w/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072 +pip-25.2.dist-info/licenses/src/pip/_vendor/truststore/LICENSE,sha256=M757fo-k_Rmxdg4ajtimaL2rhSyRtpLdQUJLy3Jan8o,1086 +pip-25.2.dist-info/licenses/src/pip/_vendor/urllib3/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115 +pip-25.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=_lgs5Mfp0t7AGtI7sTVwxKWquz_vahWWH9kKO1cJusA,353 +pip/__main__.py,sha256=WzbhHXTbSE6gBY19mNN9m4s5o_365LOvTYSgqgbdBhE,854 +pip/__pip-runner__.py,sha256=JOoEZTwrtv7jRaXBkgSQKAE04yNyfFmGHxqpHiGHvL0,1450 +pip/__pycache__/__init__.cpython-313.pyc,, +pip/__pycache__/__main__.cpython-313.pyc,, +pip/__pycache__/__pip-runner__.cpython-313.pyc,, +pip/_internal/__init__.py,sha256=S7i9Dn9aSZS0MG-2Wrve3dV9TImPzvQn5jjhp9t_uf0,511 +pip/_internal/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/__pycache__/build_env.cpython-313.pyc,, +pip/_internal/__pycache__/cache.cpython-313.pyc,, +pip/_internal/__pycache__/configuration.cpython-313.pyc,, +pip/_internal/__pycache__/exceptions.cpython-313.pyc,, +pip/_internal/__pycache__/main.cpython-313.pyc,, +pip/_internal/__pycache__/pyproject.cpython-313.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-313.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-313.pyc,, +pip/_internal/build_env.py,sha256=5_-O5G0QtCeFKHAezqAz0IZ5_O7qCYZ-Bxddu9idpYs,11566 +pip/_internal/cache.py,sha256=rWXvsuFY7GgPERhiNp3yZHFGKdvDvkZRn2n4mXn8lAg,10364 +pip/_internal/cli/__init__.py,sha256=Iqg_tKA771XuMO1P4t_sDHnSKPzkUb9D0DqunAmw_ko,131 +pip/_internal/cli/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-313.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-313.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-313.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-313.pyc,, +pip/_internal/cli/__pycache__/index_command.cpython-313.pyc,, +pip/_internal/cli/__pycache__/main.cpython-313.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-313.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-313.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-313.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-313.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-313.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-313.pyc,, +pip/_internal/cli/autocompletion.py,sha256=ZG2cM03nlcNrs-WG_SFTW46isx9s2Go5lUD_8-iv70o,7193 +pip/_internal/cli/base_command.py,sha256=1Nx919JRFlgURLis9XYJwtbyEEjRJa_NdHwM6iBkZvY,8716 +pip/_internal/cli/cmdoptions.py,sha256=AmLAJ0hLmh1ZJwFk3Y624anSbq5yu-NMiYbJKJE1YwM,32031 +pip/_internal/cli/command_context.py,sha256=kmu3EWZbfBega1oDamnGJTA_UaejhIQNuMj2CVmMXu0,817 +pip/_internal/cli/index_command.py,sha256=AHk6eSqboaxTXbG3v9mBrVd0dCK1MtW4w3PVudnj0WE,5717 +pip/_internal/cli/main.py,sha256=K9PtpRdg6uBrVKk8S2VZ14fAN0kP-cnA1o-FtJCN_OQ,2815 +pip/_internal/cli/main_parser.py,sha256=UugPD-hF1WtNQdow_WWduDLUH1DvElpc7EeUWjUkcNo,4329 +pip/_internal/cli/parser.py,sha256=sAOebBa5_0rRAlmBy8myMPFHQZb-bbCH1xpIJDaXaLs,10928 +pip/_internal/cli/progress_bars.py,sha256=nRTWNof-FjHfvirvECXIh7T7eAynTUVPTyHENfpbWiU,4668 +pip/_internal/cli/req_command.py,sha256=pXT_jAwcmO9PjJ1-Pl1VevBgr5pnxHsyPZUBzAfqQg8,13081 +pip/_internal/cli/spinners.py,sha256=EJzZIZNyUtJljp3-WjcsyIrqxW-HUsfWzhuW84n_Tqw,7362 
+pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 +pip/_internal/commands/__init__.py,sha256=aNeCbQurGWihfhQq7BqaLXHqWDQ0i3I04OS7kxK6plQ,4026 +pip/_internal/commands/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-313.pyc,, +pip/_internal/commands/__pycache__/check.cpython-313.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-313.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-313.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-313.pyc,, +pip/_internal/commands/__pycache__/download.cpython-313.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-313.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-313.pyc,, +pip/_internal/commands/__pycache__/help.cpython-313.pyc,, +pip/_internal/commands/__pycache__/index.cpython-313.pyc,, +pip/_internal/commands/__pycache__/inspect.cpython-313.pyc,, +pip/_internal/commands/__pycache__/install.cpython-313.pyc,, +pip/_internal/commands/__pycache__/list.cpython-313.pyc,, +pip/_internal/commands/__pycache__/lock.cpython-313.pyc,, +pip/_internal/commands/__pycache__/search.cpython-313.pyc,, +pip/_internal/commands/__pycache__/show.cpython-313.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-313.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-313.pyc,, +pip/_internal/commands/cache.py,sha256=OrrLS6EJEha_55yPa9fTaOaonw-VpH4_lVhjxuOTChQ,8230 +pip/_internal/commands/check.py,sha256=hVFBQezQ3zj4EydoWbFQj_afPUppMt7r9JPAlY22U6Y,2244 +pip/_internal/commands/completion.py,sha256=MDwhTOBjlM4WEbOhgbhrWnlDm710i4FMjop3RBXXXCc,4530 +pip/_internal/commands/configuration.py,sha256=6gNOGrVWnOLU15zUnAiNuOMhf76RRIZvCdVD0degPRk,10105 +pip/_internal/commands/debug.py,sha256=_8IqM8Fx1_lY2STu_qspr63tufF7zyFJCyYAXtxz0N4,6805 +pip/_internal/commands/download.py,sha256=GGBSxhORV0mrad8STgORfvjRGh1qS2plvpXFNbxgcps,5249 +pip/_internal/commands/freeze.py,sha256=JmMKqtSsbQOtV_MqTRTSUm84r2tNNXfCuPwcIbKlcMI,3170 +pip/_internal/commands/hash.py,sha256=GO9pRN3wXC2kQaovK57TaLYBMc3IltOH92O6QEw6YE0,1679 +pip/_internal/commands/help.py,sha256=Bz3LcjNQXkz4Cu__pL4CZ86o4-HNLZj1NZWdlJhjuu0,1108 +pip/_internal/commands/index.py,sha256=8GMBVI5NvhRRHBSUq27YxDIE02DpvdJ_6qiBFgGd1co,5243 +pip/_internal/commands/inspect.py,sha256=ogm4UT7LRo8bIQcWUS1IiA25QdD4VHLa7JaPAodDttM,3177 +pip/_internal/commands/install.py,sha256=AIO-Um49e3GqMOk2SmHCt45A0W0-YhKXcpr5okjHh80,30080 +pip/_internal/commands/list.py,sha256=-MYjPb1g8XndMqzskpfoCdfg9it_u7vghRspgUF2KrU,13611 +pip/_internal/commands/lock.py,sha256=SxXGnU2EH-TG-fs9fuZHAuMOH15Lzy1K7e_KJ4vtSr0,5917 +pip/_internal/commands/search.py,sha256=zbMsX_YASj6kXA6XIBgTDv0bGK51xG-CV3IynZJcE-c,5782 +pip/_internal/commands/show.py,sha256=oLVJIfKWmDKm0SsQGEi3pozNiqrXjTras_fbBSYKpBA,8066 +pip/_internal/commands/uninstall.py,sha256=CsOihqvb6ZA6O67L70oXeoLHeOfNzMM88H9g-9aocgw,3868 +pip/_internal/commands/wheel.py,sha256=vcNgnhxwilocRQJEBdWQvJ8qvO0RfWmz9LwrUBadvYE,6322 +pip/_internal/configuration.py,sha256=BomQ3pC84J6zCXDQpND_OmFY7mubE9cMBZVohNUzMvY,14587 +pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858 +pip/_internal/distributions/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-313.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-313.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-313.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-313.pyc,, 
+pip/_internal/distributions/base.py,sha256=l-OTCAIs25lsapejA6IYpPZxSM5-BET4sdZDkql8jiY,1830 +pip/_internal/distributions/installed.py,sha256=kgIEE_1NzjZxLBSC-v5s64uOFZlVEt3aPrjTtL6x2XY,929 +pip/_internal/distributions/sdist.py,sha256=gYgrzI0lstHJRKweOdL_m6LtqDxtfuo_yCL9HjmH-xw,6952 +pip/_internal/distributions/wheel.py,sha256=_HbG0OehF8dwj4UX-xV__tXLwgPus9OjMEf2NTRqBbE,1364 +pip/_internal/exceptions.py,sha256=bRTEn6Jlw-vxHjixtiu0K79jXJu0X5FVC0L62Bdb0KI,28974 +pip/_internal/index/__init__.py,sha256=tzwMH_fhQeubwMqHdSivasg1cRgTSbNg2CiMVnzMmyU,29 +pip/_internal/index/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/index/__pycache__/collector.cpython-313.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-313.pyc,, +pip/_internal/index/__pycache__/sources.cpython-313.pyc,, +pip/_internal/index/collector.py,sha256=PCB3thVWRiSBowGtpv1elIPFc-GvEqhZiNgZD7b0vBc,16185 +pip/_internal/index/package_finder.py,sha256=Kd2FmWJFAcEg2Sy2pTGQX8WPFC8B9U_3b1VK7i8sx9o,38827 +pip/_internal/index/sources.py,sha256=nXJkOjhLy-O2FsrKU9RIqCOqgY2PsoKWybtZjjRgqU0,8639 +pip/_internal/locations/__init__.py,sha256=2SADX0Gr9BIpx19AO7Feq89nOmBQGEbl1IWjBpnaE9E,14185 +pip/_internal/locations/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/locations/__pycache__/_distutils.cpython-313.pyc,, +pip/_internal/locations/__pycache__/_sysconfig.cpython-313.pyc,, +pip/_internal/locations/__pycache__/base.cpython-313.pyc,, +pip/_internal/locations/_distutils.py,sha256=jpFj4V00rD9IR3vA9TqrGkwcdNVFc58LsChZavge9JY,5975 +pip/_internal/locations/_sysconfig.py,sha256=NhcEi1_25w9cTTcH4RyOjD4UHW6Ijks0uKy1PL1_j_8,7716 +pip/_internal/locations/base.py,sha256=AImjYJWxOtDkc0KKc6Y4Gz677cg91caMA4L94B9FZEg,2550 +pip/_internal/main.py,sha256=1cHqjsfFCrMFf3B5twzocxTJUdHMLoXUpy5lJoFqUi8,338 +pip/_internal/metadata/__init__.py,sha256=a19B00IsX25khBt8oWWy6_kKgjCM4zeh-FqYteCOFwA,5714 +pip/_internal/metadata/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/metadata/__pycache__/_json.cpython-313.pyc,, +pip/_internal/metadata/__pycache__/base.cpython-313.pyc,, +pip/_internal/metadata/__pycache__/pkg_resources.cpython-313.pyc,, +pip/_internal/metadata/_json.py,sha256=hNvnMHOXLAyNlzirWhPL9Nx2CvCqa1iRma6Osq1YfV8,2711 +pip/_internal/metadata/base.py,sha256=BGuMenlcQT8i7j9iclrfdC3vSwgvhr8gjn955cCy16s,25420 +pip/_internal/metadata/importlib/__init__.py,sha256=jUUidoxnHcfITHHaAWG1G2i5fdBYklv_uJcjo2x7VYE,135 +pip/_internal/metadata/importlib/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/metadata/importlib/__pycache__/_compat.cpython-313.pyc,, +pip/_internal/metadata/importlib/__pycache__/_dists.cpython-313.pyc,, +pip/_internal/metadata/importlib/__pycache__/_envs.cpython-313.pyc,, +pip/_internal/metadata/importlib/_compat.py,sha256=sneVh4_6WxQZK4ljdl3ylVuP-q0ttSqbgl9mWt0HnOg,2804 +pip/_internal/metadata/importlib/_dists.py,sha256=aOUgCX_AtA8B-lWLu-HfXA-ivMJWNxHPAohVcpQj2g4,8259 +pip/_internal/metadata/importlib/_envs.py,sha256=H3qVLXVh4LWvrPvu_ekXf3dfbtwnlhNJQP2pxXpccfU,5333 +pip/_internal/metadata/pkg_resources.py,sha256=NO76ZrfR2-LKJTyaXrmQoGhmJMArALvacrlZHViSDT8,10544 +pip/_internal/models/__init__.py,sha256=AjmCEBxX_MH9f_jVjIGNCFJKYCYeSEe18yyvNx4uRKQ,62 +pip/_internal/models/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-313.pyc,, +pip/_internal/models/__pycache__/direct_url.cpython-313.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-313.pyc,, +pip/_internal/models/__pycache__/index.cpython-313.pyc,, +pip/_internal/models/__pycache__/installation_report.cpython-313.pyc,, 
+pip/_internal/models/__pycache__/link.cpython-313.pyc,, +pip/_internal/models/__pycache__/pylock.cpython-313.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-313.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-313.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-313.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-313.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-313.pyc,, +pip/_internal/models/candidate.py,sha256=zzgFRuw_kWPjKpGw7LC0ZUMD2CQ2EberUIYs8izjdCA,753 +pip/_internal/models/direct_url.py,sha256=4NMWacu_QzPPWREC1te7v6Wfv-2HkI4tvSJF-CBgLh4,6555 +pip/_internal/models/format_control.py,sha256=PwemYG1L27BM0f1KP61rm24wShENFyxqlD1TWu34alc,2471 +pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030 +pip/_internal/models/installation_report.py,sha256=cqfWJ93ThCxjcacqSWryOCD2XtIn1CZrgzZxAv5FQZ0,2839 +pip/_internal/models/link.py,sha256=h4lI8MaDA7DTIxIJ_NgDE64EE4h1sX3AB23Y1Qu8W-k,21793 +pip/_internal/models/pylock.py,sha256=Vmaa71gOSV0ZYzRgWiIm4KwVbClaahMcuvKCkP_ZznA,6211 +pip/_internal/models/scheme.py,sha256=PakmHJM3e8OOWSZFtfz1Az7f1meONJnkGuQxFlt3wBE,575 +pip/_internal/models/search_scope.py,sha256=1hxU2IVsAaLZVjp0CbzJbYaYzCxv72_Qbg3JL0qhXo0,4507 +pip/_internal/models/selection_prefs.py,sha256=lgYyo4W8lb22wsYx2ElBBB0cvSNlBVgucwBzL43dfzE,2016 +pip/_internal/models/target_python.py,sha256=I0eFS-eia3kwhrOvgsphFZtNAB2IwXZ9Sr9fp6IjBP4,4243 +pip/_internal/models/wheel.py,sha256=xWO0K-YanSL3OfMZUN1gHlEUV0YJOVYMQ4wrwmA9Zis,5526 +pip/_internal/network/__init__.py,sha256=FMy06P__y6jMjUc8z3ZcQdKF-pmZ2zM14_vBeHPGhUI,49 +pip/_internal/network/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/network/__pycache__/auth.cpython-313.pyc,, +pip/_internal/network/__pycache__/cache.cpython-313.pyc,, +pip/_internal/network/__pycache__/download.cpython-313.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-313.pyc,, +pip/_internal/network/__pycache__/session.cpython-313.pyc,, +pip/_internal/network/__pycache__/utils.cpython-313.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-313.pyc,, +pip/_internal/network/auth.py,sha256=uAwRGAYnVtgNSZm4HMC3BMACkgA7ku4m8wupiX6LpK8,20681 +pip/_internal/network/cache.py,sha256=u5LtQbnCzMU6vFT0eUUcYe6iKiwALMqrn1jUZj4CqaY,5302 +pip/_internal/network/download.py,sha256=HgsFvTkPDdgg0zUehose_J-542-9R0FpyipRw5BhxAM,12682 +pip/_internal/network/lazy_wheel.py,sha256=5H7_s-_AK6br1K5NFcyUsiocK9E6vwrJY5kzwjMv0EM,7651 +pip/_internal/network/session.py,sha256=eE-VUIJGU9YeeaVy7tVAvMRWigMsyuAMpxkjlbptbjo,19188 +pip/_internal/network/utils.py,sha256=ACsXd1msqNCidHVXsu7LHUSr8NgaypcOKQ4KG-Z_wJM,4091 +pip/_internal/network/xmlrpc.py,sha256=_-Rnk3vOff8uF9hAGmT6SLALflY1gMBcbGwS12fb_Y4,1830 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/operations/__pycache__/check.cpython-313.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-313.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-313.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/build/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/operations/build/__pycache__/build_tracker.cpython-313.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-313.pyc,, +pip/_internal/operations/build/__pycache__/metadata_editable.cpython-313.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-313.pyc,, 
+pip/_internal/operations/build/__pycache__/wheel.cpython-313.pyc,, +pip/_internal/operations/build/__pycache__/wheel_editable.cpython-313.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-313.pyc,, +pip/_internal/operations/build/build_tracker.py,sha256=W3b5cmkMWPaE6QIwfzsTayJo7-OlxFHWDxfPuax1KcE,4771 +pip/_internal/operations/build/metadata.py,sha256=INHaeiRfOiLYCXApfDNRo9Cw2xI4VwTc0KItvfdfOjk,1421 +pip/_internal/operations/build/metadata_editable.py,sha256=oWudMsnjy4loO_Jy7g4N9nxsnaEX_iDlVRgCy7pu1rs,1509 +pip/_internal/operations/build/metadata_legacy.py,sha256=wv8cFA0wTqF62Jlm9QwloYZsofOyQ7sWBBmvCcVvn1k,2189 +pip/_internal/operations/build/wheel.py,sha256=toBJE5atKJGsSckY5ztVAQpRss25f049f-nJaiJsiD8,1080 +pip/_internal/operations/build/wheel_editable.py,sha256=Djx7hZqhejgYUcdQD5p4mn7AVFN3VQ7bOSs7b90TtOI,1422 +pip/_internal/operations/build/wheel_legacy.py,sha256=LaHpG4_s6415iLjuTb6seO3JIJ7097yQUIthcmpqaOY,3616 +pip/_internal/operations/check.py,sha256=yC2XWth6iehGGE_fj7XRJLjVKBsTIG3ZoWRkFi3rOwc,5894 +pip/_internal/operations/freeze.py,sha256=PDdY-y_ZtZZJLAKcaWPIGRKAGW7DXR48f0aMRU0j7BA,9854 +pip/_internal/operations/install/__init__.py,sha256=ak-UETcQPKlFZaWoYKWu5QVXbpFBvg0sXc3i0O4vSYY,50 +pip/_internal/operations/install/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-313.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-313.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=zS6Hm-9SHcrVuA_9Irc9Qc8CIoFLDMZiSvi5rizZe34,1311 +pip/_internal/operations/install/wheel.py,sha256=8aepxxAFmnzZFtcMCv-1I4T_maEkQd4hXZztYWE4yR0,27956 +pip/_internal/operations/prepare.py,sha256=gdbQUAjxLqqz8P2KFem2OEJ_6V3aTuozQkiBLfCCVAU,28613 +pip/_internal/pyproject.py,sha256=T72qz0buaY0s-mgrXLUU1sONf4Rk97dJu0qpniBCGU8,7233 +pip/_internal/req/__init__.py,sha256=vM0bWAl3By5ALRIXWmKdkMca18KQfbGXVJZm3Igpwmg,3122 +pip/_internal/req/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-313.pyc,, +pip/_internal/req/__pycache__/req_dependency_group.cpython-313.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-313.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-313.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-313.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-313.pyc,, +pip/_internal/req/constructors.py,sha256=QlLjBIg5xPxyQx_hGGB7Fz527ruAXC6nbVMoGfXzNnM,18320 +pip/_internal/req/req_dependency_group.py,sha256=0yEQCUaO5Bza66Y3D5o9JRf0qII5QgCRugn1x5aRivA,2618 +pip/_internal/req/req_file.py,sha256=j_1PcBDO0aKb8pjjRMsZQrwts5YNrKsrHjPyV56XoHo,20161 +pip/_internal/req/req_install.py,sha256=N4LxV9LHimgle5zEkNFk9WSfi51KXvK9y73mvYl9uig,35718 +pip/_internal/req/req_set.py,sha256=awkqIXnYA4Prmsj0Qb3zhqdbYUmXd-1o0P-KZ3mvRQs,2828 +pip/_internal/req/req_uninstall.py,sha256=dCmOHt-9RaJBq921L4tMH3PmIBDetGplnbjRKXmGt00,24099 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-313.pyc,, +pip/_internal/resolution/base.py,sha256=RIsqSP79olPdOgtPKW-oOQ364ICVopehA6RfGkRfe2s,577 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/legacy/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-313.pyc,, 
+pip/_internal/resolution/legacy/resolver.py,sha256=bwUqE66etz2bcPabqxed18-iyqqb-kx3Er2aT6GeUJY,24060 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-313.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=_AoP0ZWlaSct8CRDn2ol3CbNn4zDtnh_0zQGjXASDKI,5047 +pip/_internal/resolution/resolvelib/candidates.py,sha256=dblosDcMumdT075Y8D1yXiJrWF_wDgIGe_8hUQIneTw,20208 +pip/_internal/resolution/resolvelib/factory.py,sha256=XdCpLq59oVIHMMekc2cgD7imcVD00_ioTnu9_k3iq3E,32577 +pip/_internal/resolution/resolvelib/found_candidates.py,sha256=8bZYDCZLXSdLHy_s1o5f4r15HmKvqFUhzBUQOF21Lr4,6018 +pip/_internal/resolution/resolvelib/provider.py,sha256=1TJC2o7F02aTMaoqa0Ayz45PrE-pQ5XshgGUmweESkk,11144 +pip/_internal/resolution/resolvelib/reporter.py,sha256=atiTh1bnKc-KfG841nRw3dk0WbOhr_L8luJHcY0JoSs,3270 +pip/_internal/resolution/resolvelib/requirements.py,sha256=z0gXmWfo03ynOnhF8kpj5SycgroerDhQV0VWzmAKAfg,8076 +pip/_internal/resolution/resolvelib/resolver.py,sha256=f0fmYzB9G4wWxssfj4qerDi_F_8vjAaYPbGLb7N7tQw,13617 +pip/_internal/self_outdated_check.py,sha256=wfikZmcGVIrnepL413-0rnQ3jeOA0unOKPle0Ovwoho,8326 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/utils/__pycache__/_jaraco_text.cpython-313.pyc,, +pip/_internal/utils/__pycache__/_log.cpython-313.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-313.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-313.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-313.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-313.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-313.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-313.pyc,, +pip/_internal/utils/__pycache__/egg_link.cpython-313.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-313.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-313.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-313.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-313.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-313.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-313.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-313.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-313.pyc,, +pip/_internal/utils/__pycache__/retry.cpython-313.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-313.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-313.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-313.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-313.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-313.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-313.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-313.pyc,, 
+pip/_internal/utils/_jaraco_text.py,sha256=M15uUPIh5NpP1tdUGBxRau6q1ZAEtI8-XyLEETscFfE,3350 +pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015 +pip/_internal/utils/appdirs.py,sha256=LrzDPZMKVh0rubtCx9vu3XlZbLCSug6VSj4Qsvt66BA,1681 +pip/_internal/utils/compat.py,sha256=C9LHXJAKkwAH8Hn3nPkz9EYK3rqPBeO_IXkOG2zzsdQ,2514 +pip/_internal/utils/compatibility_tags.py,sha256=DiNSLqpuruXUamGQwOJ2WZByDGLTGaXi9O-Xf8fOi34,6630 +pip/_internal/utils/datetime.py,sha256=Gt29Ml4ToPSM88j54iu43WKtrU9A-moP4QmMiiqzedU,241 +pip/_internal/utils/deprecation.py,sha256=HVhvyO5qiRFcG88PhZlp_87qdKQNwPTUIIHWtsTR2yI,3696 +pip/_internal/utils/direct_url_helpers.py,sha256=ttKv4GMUqlRwPPog9_CUopy6SDgoxVILzeBJzgfn2tg,3200 +pip/_internal/utils/egg_link.py,sha256=YWfsrbmfcrfWgqQYy6OuIjsyb9IfL1q_2v4zsms1WjI,2459 +pip/_internal/utils/entrypoints.py,sha256=uPjAyShKObdotjQjJUzprQ6r3xQvDIZwUYfHHqZ7Dok,3324 +pip/_internal/utils/filesystem.py,sha256=du4lOOlTOBq22V5kf0yXMydo1KU2Cs-cMtYs3bEk13c,4988 +pip/_internal/utils/filetypes.py,sha256=sEMa38qaqjvx1Zid3OCAUja31BOBU-USuSMPBvU3yjo,689 +pip/_internal/utils/glibc.py,sha256=sEh8RJJLYSdRvTqAO4THVPPA-YSDVLD4SI9So-bxX1U,3726 +pip/_internal/utils/hashes.py,sha256=d32UI1en8nyqZzdZQvxUVdfeBoe4ADWx7HtrIM4-XQ4,4998 +pip/_internal/utils/logging.py,sha256=RtRe7Vp0COC4UBewYdfKicXjCTmHXpDZHdReTzJvB78,12108 +pip/_internal/utils/misc.py,sha256=1jEpqjfqYmQ6K3D4_O8xXSPn8aEfH2uMOlNM7KPvSrg,23374 +pip/_internal/utils/packaging.py,sha256=s5tpUmFumwV0H9JSTzryrIY4JwQM8paGt7Sm7eNwt2Y,1601 +pip/_internal/utils/retry.py,sha256=83wReEB2rcntMZ5VLd7ascaYSjn_kLdlQCqxILxWkPM,1461 +pip/_internal/utils/setuptools_build.py,sha256=0RK4K07qpaBgnYm50_f5d5VdedYPk1fl9QNKaOa60XM,4499 +pip/_internal/utils/subprocess.py,sha256=r4-Ba_Yc3uZXQpi0K4pZFsCT_QqdSvtF3XJ-204QWaA,8983 +pip/_internal/utils/temp_dir.py,sha256=D9c8D7WOProOO8GGDqpBeVSj10NGFmunG0o2TodjjIU,9307 +pip/_internal/utils/unpacking.py,sha256=4tY2D-sbtBt2Lcws98Em3MPAPdHnXSBXnpSU0LoGUZQ,11939 +pip/_internal/utils/urls.py,sha256=aF_eg9ul5d8bMCxfSSSxQcfs-OpJdbStYqZHoy2K1RE,1601 +pip/_internal/utils/virtualenv.py,sha256=mX-UPyw1MPxhwUxKhbqWWX70J6PHXAJjVVrRnG0h9mc,3455 +pip/_internal/utils/wheel.py,sha256=YdRuj6MicG-Q9Mg03FbUv1WTLam6Lc7AgijY4voVyis,4468 +pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596 +pip/_internal/vcs/__pycache__/__init__.cpython-313.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-313.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-313.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-313.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-313.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-313.pyc,, +pip/_internal/vcs/bazaar.py,sha256=3W1eHjkYx2vc6boeb2NBh4I_rlGAXM-vrzfNhLm1Rxg,3734 +pip/_internal/vcs/git.py,sha256=TTeqDuzS-_BFSNuUStVWmE2nGDpKuvUhBBJk_CCQXV0,19144 +pip/_internal/vcs/mercurial.py,sha256=w1ZJWLKqNP1onEjkfjlwBVnMqPZNSIER8ayjQcnTq4w,5575 +pip/_internal/vcs/subversion.py,sha256=uUgdPvxmvEB8Qwtjr0Hc0XgFjbiNi5cbvI4vARLOJXo,11787 +pip/_internal/vcs/versioncontrol.py,sha256=d-v1mcLxofg2FaIqBrV-e-ZcjOgQhS0oxXpki1v1yXs,22502 +pip/_internal/wheel_builder.py,sha256=PDF2w6pxeec5skP3gLIrR_VrJrAO4pHRpUnkNbJ8R5Q,11225 +pip/_vendor/__init__.py,sha256=WzusPTGWIMeQQWSVJ0h2rafGkVTa9WKJ2HT-2-EoZrU,4907 +pip/_vendor/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/cachecontrol/__init__.py,sha256=BF2n5OeQz1QW2xSey2LxfNCtwbjnTadXdIH2toqJecg,677 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-313.pyc,, 
+pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-313.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-313.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=iist2EpzJvDVIhMAxXq8iFnTBsiZAd6iplxfmNboNyk,1737 +pip/_vendor/cachecontrol/adapter.py,sha256=8y6rTPXOzVHmDKCW5CR9sivLVuDv-cpdGcZYdRWNaPw,6599 +pip/_vendor/cachecontrol/cache.py,sha256=OXwv7Fn2AwnKNiahJHnjtvaKLndvVLv_-zO-ltlV9qI,1953 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=dtrrroK5BnADR1GWjCZ19aZ0tFsMfvFBtLQQU1sp_ag,303 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-313.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-313.pyc,, +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=d8upFmy_zwaCmlbWEVBlLXFddt8Zw8c5SFpxeOZsdfw,4117 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=9rmqwtYu_ljVkW6_oLqbC7EaX_a8YT_yLuna-eS0dgo,1386 +pip/_vendor/cachecontrol/controller.py,sha256=cx0Hl8xLZgUuXuy78Gih9AYjCtqurmYjVJxyA4yWt7w,19101 +pip/_vendor/cachecontrol/filewrapper.py,sha256=2ktXNPE0KqnyzF24aOsKCA58HQq1xeC6l2g6_zwjghc,4291 +pip/_vendor/cachecontrol/heuristics.py,sha256=gqMXU8w0gQuEQiSdu3Yg-0vd9kW7nrWKbLca75rheGE,4881 +pip/_vendor/cachecontrol/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/cachecontrol/serialize.py,sha256=HQd2IllQ05HzPkVLMXTF2uX5mjEQjDBkxCqUJUODpZk,5163 +pip/_vendor/cachecontrol/wrapper.py,sha256=hsGc7g8QGQTT-4f8tgz3AM5qwScg6FO0BSdLSRdEvpU,1417 +pip/_vendor/certifi/__init__.py,sha256=dvNDUdAXp-hzoYcf09PXzLmHIlPfypaBQPFp5esDXyo,94 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-313.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-313.pyc,, +pip/_vendor/certifi/cacert.pem,sha256=lm3cYJxEv9SoAx4Atc3NiT1D92d965vcID6H39f_93o,290057 +pip/_vendor/certifi/core.py,sha256=amb3joGaRvFK1cXsJKGxzIKL5O_TgUmGyM87ML8FQH4,3694 +pip/_vendor/certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/dependency_groups/__init__.py,sha256=C3OFu0NGwDzQ4LOmmSOFPsRSvkbBn-mdd4j_5YqJw-s,250 +pip/_vendor/dependency_groups/__main__.py,sha256=UNTM7P5mfVtT7wDi9kOTXWgV3fu3e8bTrt1Qp1jvjKo,1709 +pip/_vendor/dependency_groups/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/dependency_groups/__pycache__/__main__.cpython-313.pyc,, +pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-313.pyc,, +pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-313.pyc,, +pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-313.pyc,, +pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-313.pyc,, +pip/_vendor/dependency_groups/_implementation.py,sha256=Gqb2DlQELRakeHlKf6QtQSW0M-bcEomxHw4JsvID1ls,8041 +pip/_vendor/dependency_groups/_lint_dependency_groups.py,sha256=yp-DDqKXtbkDTNa0ifa-FmOA8ra24lPZEXftW-R5AuI,1710 +pip/_vendor/dependency_groups/_pip_wrapper.py,sha256=nuVW_w_ntVxpE26ELEvngMY0N04sFLsijXRyZZROFG8,1865 
+pip/_vendor/dependency_groups/_toml_compat.py,sha256=BHnXnFacm3DeolsA35GjI6qkDApvua-1F20kv3BfZWE,285 +pip/_vendor/dependency_groups/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/distlib/__init__.py,sha256=Deo3uo98aUyIfdKJNqofeSEFWwDzrV2QeGLXLsgq0Ag,625 +pip/_vendor/distlib/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-313.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-313.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-313.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-313.pyc,, +pip/_vendor/distlib/compat.py,sha256=2jRSjRI4o-vlXeTK2BCGIUhkc6e9ZGhSsacRM5oseTw,41467 +pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 +pip/_vendor/distlib/scripts.py,sha256=Qvp76E9Jc3IgyYubnpqI9fS7eseGOe4FjpeVKqKt9Iw,18612 +pip/_vendor/distlib/util.py,sha256=vMPGvsS4j9hF6Y9k3Tyom1aaHLb0rFmZAEyzeAdel9w,66682 +pip/_vendor/distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981 +pip/_vendor/distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64 +pip/_vendor/distro/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/distro/__pycache__/__main__.cpython-313.pyc,, +pip/_vendor/distro/__pycache__/distro.cpython-313.pyc,, +pip/_vendor/distro/distro.py,sha256=XqbefacAhDT4zr_trnbA15eY8vdK4GTghgmvUGrEM_4,49430 +pip/_vendor/distro/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 +pip/_vendor/idna/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-313.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-313.pyc,, +pip/_vendor/idna/codec.py,sha256=PEew3ItwzjW4hymbasnty2N2OXvNcgHB-JjrBuxHPYY,3422 +pip/_vendor/idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 +pip/_vendor/idna/core.py,sha256=YJYyAMnwiQEPjVC4-Fqu_p4CJ6yKKuDGmppBNQNQpFs,13239 +pip/_vendor/idna/idnadata.py,sha256=W30GcIGvtOWYwAjZj4ZjuouUutC6ffgNuyjJy7fZ-lo,78306 +pip/_vendor/idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 +pip/_vendor/idna/package_data.py,sha256=q59S3OXsc5VI8j6vSD0sGBMyk6zZ4vWFREE88yCJYKs,21 +pip/_vendor/idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/idna/uts46data.py,sha256=rt90K9J40gUSwppDPCrhjgi5AA6pWM65dEGRSf6rIhM,239289 +pip/_vendor/msgpack/__init__.py,sha256=q2T5PRsITVpeytgS0WiSqok9IY8V_aFiC8VVlXTCTgA,1109 +pip/_vendor/msgpack/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-313.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-313.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-313.pyc,, +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=kteJv03n9tYzd5oo3xYopVTo4vRaAxonBQQJhXohZZo,5726 +pip/_vendor/msgpack/fallback.py,sha256=0g1Pzp0vtmBEmJ5w9F3s_-JMVURP8RS4G1cc5TRaAsI,32390 +pip/_vendor/packaging/__init__.py,sha256=_0cDiPVf2S-bNfVmZguxxzmrIYWlyASxpqph4qsJWUc,494 +pip/_vendor/packaging/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/_elffile.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/_manylinux.cpython-313.pyc,, 
+pip/_vendor/packaging/__pycache__/_musllinux.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/_parser.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/_tokenizer.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/metadata.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-313.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-313.pyc,, +pip/_vendor/packaging/_elffile.py,sha256=UkrbDtW7aeq3qqoAfU16ojyHZ1xsTvGke_WqMTKAKd0,3286 +pip/_vendor/packaging/_manylinux.py,sha256=t4y_-dTOcfr36gLY-ztiOpxxJFGO2ikC11HgfysGxiM,9596 +pip/_vendor/packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694 +pip/_vendor/packaging/_parser.py,sha256=gYfnj0pRHflVc4RHZit13KNTyN9iiVcU2RUCGi22BwM,10221 +pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +pip/_vendor/packaging/_tokenizer.py,sha256=OYzt7qKxylOAJ-q0XyK1qAycyPRYLfMPdGQKRXkZWyI,5310 +pip/_vendor/packaging/licenses/__init__.py,sha256=3bx-gryo4sRv5LsrwApouy65VIs3u6irSORJzALkrzU,5727 +pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-313.pyc,, +pip/_vendor/packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398 +pip/_vendor/packaging/markers.py,sha256=P0we27jm1xUzgGMJxBjtUFCIWeBxTsMeJTOJ6chZmAY,12049 +pip/_vendor/packaging/metadata.py,sha256=8IZErqQQnNm53dZZuYq4FGU4_dpyinMeH1QFBIWIkfE,34739 +pip/_vendor/packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947 +pip/_vendor/packaging/specifiers.py,sha256=yc9D_MycJEmwUpZvcs1OZL9HfiNFmyw0RZaeHRNHkPw,40079 +pip/_vendor/packaging/tags.py,sha256=41s97W9Zatrq2Ed7Rc3qeBDaHe8pKKvYq2mGjwahfXk,22745 +pip/_vendor/packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050 +pip/_vendor/packaging/version.py,sha256=oiHqzTUv_p12hpjgsLDVcaF5hT7pDaSOViUNMD4GTW0,16688 +pip/_vendor/pkg_resources/__init__.py,sha256=vbTJ0_ruUgGxQjlEqsruFmiNPVyh2t9q-zyTDT053xI,124451 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/platformdirs/__init__.py,sha256=UfeSHWl8AeTtbOBOoHAxK4dODOWkZtfy-m_i7cWdJ8c,22344 +pip/_vendor/platformdirs/__main__.py,sha256=jBJ8zb7Mpx5ebcqF83xrpO94MaeCpNGHVf9cvDN2JLg,1505 +pip/_vendor/platformdirs/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/__main__.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/android.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/api.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/macos.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/unix.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/version.cpython-313.pyc,, +pip/_vendor/platformdirs/__pycache__/windows.cpython-313.pyc,, +pip/_vendor/platformdirs/android.py,sha256=r0DshVBf-RO1jXJGX8C4Til7F1XWt-bkdWMgmvEiaYg,9013 +pip/_vendor/platformdirs/api.py,sha256=U9EzI3EYxcXWUCtIGRllqrcN99i2LSY1mq2-GtsUwEQ,9277 +pip/_vendor/platformdirs/macos.py,sha256=UlbyFZ8Rzu3xndCqQEHrfsYTeHwYdFap1Ioz-yxveT4,6154 +pip/_vendor/platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_vendor/platformdirs/unix.py,sha256=WZmkUA--L3JNRGmz32s35YfoD3ica6xKIPdCV_HhLcs,10458 +pip/_vendor/platformdirs/version.py,sha256=ddN3EcUPfer7CbqmyFNmg03R3u-qDn32T_fLsx25-Ck,511 +pip/_vendor/platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125 +pip/_vendor/pygments/__init__.py,sha256=8uNqJCCwXqbEx5aSsBr0FykUQOBDKBihO5mPqiw1aqo,2983 +pip/_vendor/pygments/__main__.py,sha256=WrndpSe6i1ckX_SQ1KaxD9CTKGzD0EuCOFxcbwFpoLU,353 +pip/_vendor/pygments/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/__main__.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/console.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/filter.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/formatter.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/lexer.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/modeline.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/plugin.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/regexopt.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/scanner.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/sphinxext.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/style.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/token.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/unistring.cpython-313.pyc,, +pip/_vendor/pygments/__pycache__/util.cpython-313.pyc,, +pip/_vendor/pygments/console.py,sha256=AagDWqwea2yBWf10KC9ptBgMpMjxKp8yABAmh-NQOVk,1718 +pip/_vendor/pygments/filter.py,sha256=YLtpTnZiu07nY3oK9nfR6E9Y1FBHhP5PX8gvkJWcfag,1910 +pip/_vendor/pygments/filters/__init__.py,sha256=4U4jtA0X3iP83uQnB9-TI-HDSw8E8y8zMYHa0UjbbaI,40392 +pip/_vendor/pygments/filters/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pygments/formatter.py,sha256=KZQMmyo_xkOIkQG8g66LYEkBh1bx7a0HyGCBcvhI9Ew,4390 +pip/_vendor/pygments/formatters/__init__.py,sha256=KTwBmnXlaopJhQDOemVHYHskiDghuq-08YtP6xPNJPg,5385 +pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-313.pyc,, +pip/_vendor/pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176 +pip/_vendor/pygments/lexer.py,sha256=_kBrOJ_NT5Tl0IVM0rA9c8eysP6_yrlGzEQI0eVYB-A,35349 +pip/_vendor/pygments/lexers/__init__.py,sha256=wbIME35GH7bI1B9rNPJFqWT-ij_RApZDYPUlZycaLzA,12115 +pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-313.pyc,, +pip/_vendor/pygments/lexers/__pycache__/python.cpython-313.pyc,, +pip/_vendor/pygments/lexers/_mapping.py,sha256=l4tCXM8e9aPC2BD6sjIr0deT-J-z5tHgCwL-p1fS0PE,77602 +pip/_vendor/pygments/lexers/python.py,sha256=vxjn1cOHclIKJKxoyiBsQTY65GHbkZtZRuKQ2AVCKaw,53853 +pip/_vendor/pygments/modeline.py,sha256=K5eSkR8GS1r5OkXXTHOcV0aM_6xpk9eWNEIAW-OOJ2g,1005 +pip/_vendor/pygments/plugin.py,sha256=tPx0rJCTIZ9ioRgLNYG4pifCbAwTRUZddvLw-NfAk2w,1891 +pip/_vendor/pygments/regexopt.py,sha256=wXaP9Gjp_hKAdnICqoDkRxAOQJSc4v3X6mcxx3z-TNs,3072 +pip/_vendor/pygments/scanner.py,sha256=nNcETRR1tRuiTaHmHSTTECVYFPcLf6mDZu1e4u91A9E,3092 +pip/_vendor/pygments/sphinxext.py,sha256=5x7Zh9YlU6ISJ31dMwduiaanb5dWZnKg3MyEQsseNnQ,7981 +pip/_vendor/pygments/style.py,sha256=PlOZqlsnTVd58RGy50vkA2cXQ_lP5bF5EGMEBTno6DA,6420 +pip/_vendor/pygments/styles/__init__.py,sha256=x9ebctfyvCAFpMTlMJ5YxwcNYBzjgq6zJaKkNm78r4M,2042 +pip/_vendor/pygments/styles/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-313.pyc,, 
+pip/_vendor/pygments/styles/_mapping.py,sha256=6lovFUE29tz6EsV3XYY4hgozJ7q1JL7cfO3UOlgnS8w,3312 +pip/_vendor/pygments/token.py,sha256=WbdWGhYm_Vosb0DDxW9lHNPgITXfWTsQmHt6cy9RbcM,6226 +pip/_vendor/pygments/unistring.py,sha256=al-_rBemRuGvinsrM6atNsHTmJ6DUbw24q2O2Ru1cBc,63208 +pip/_vendor/pygments/util.py,sha256=oRtSpiAo5jM9ulntkvVbgXUdiAW57jnuYGB7t9fYuhc,10031 +pip/_vendor/pyproject_hooks/__init__.py,sha256=cPB_a9LXz5xvsRbX1o2qyAdjLatZJdQ_Lc5McNX-X7Y,691 +pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-313.pyc,, +pip/_vendor/pyproject_hooks/_impl.py,sha256=jY-raxnmyRyB57ruAitrJRUzEexuAhGTpgMygqx67Z4,14936 +pip/_vendor/pyproject_hooks/_in_process/__init__.py,sha256=MJNPpfIxcO-FghxpBbxkG1rFiQf6HOUbV4U5mq0HFns,557 +pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-313.pyc,, +pip/_vendor/pyproject_hooks/_in_process/_in_process.py,sha256=qcXMhmx__MIJq10gGHW3mA4Tl8dy8YzHMccwnNoKlw0,12216 +pip/_vendor/pyproject_hooks/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/requests/__init__.py,sha256=HlB_HzhrzGtfD_aaYUwUh1zWXLZ75_YCLyit75d0Vz8,5057 +pip/_vendor/requests/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-313.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-313.pyc,, +pip/_vendor/requests/__version__.py,sha256=FDq681Y3EvBjdDp5UqplMZ28uTTYlM_Jib0sAV-NpXc,435 +pip/_vendor/requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495 +pip/_vendor/requests/adapters.py,sha256=J7VeVxKBvawbtlX2DERVo05J9BXTcWYLMHNd1Baa-bk,27607 +pip/_vendor/requests/api.py,sha256=_Zb9Oa7tzVIizTKwFrPjDEY9ejtm_OnSRERnADxGsQs,6449 +pip/_vendor/requests/auth.py,sha256=kF75tqnLctZ9Mf_hm9TZIj4cQWnN5uxRz8oWsx5wmR0,10186 +pip/_vendor/requests/certs.py,sha256=kHDlkK_beuHXeMPc5jta2wgl8gdKeUWt5f2nTDVrvt8,441 +pip/_vendor/requests/compat.py,sha256=QfbmdTFiZzjSHMXiMrd4joCRU6RabtQ9zIcPoVaHIus,1822 +pip/_vendor/requests/cookies.py,sha256=bNi-iqEj4NPZ00-ob-rHvzkvObzN3lEpgw3g6paS3Xw,18590 +pip/_vendor/requests/exceptions.py,sha256=D1wqzYWne1mS2rU43tP9CeN1G7QAy7eqL9o1god6Ejw,4272 +pip/_vendor/requests/help.py,sha256=hRKaf9u0G7fdwrqMHtF3oG16RKktRf6KiwtSq2Fo1_0,3813 +pip/_vendor/requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 +pip/_vendor/requests/models.py,sha256=taljlg6vJ4b-xMu2TaMNFFkaiwMex_VsEQ6qUTN3wzY,35575 +pip/_vendor/requests/packages.py,sha256=_ZQDCJTJ8SP3kVWunSqBsRZNPzj2c1WFVqbdr08pz3U,1057 
+pip/_vendor/requests/sessions.py,sha256=ykTI8UWGSltOfH07HKollH7kTBGw4WhiBVaQGmckTw4,30495 +pip/_vendor/requests/status_codes.py,sha256=iJUAeA25baTdw-6PfD0eF4qhpINDJRJI-yaMqxs4LEI,4322 +pip/_vendor/requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 +pip/_vendor/requests/utils.py,sha256=WS3wHSQaaEfceu1syiFo5jf4e_CWKUTep_IabOVI_J0,33225 +pip/_vendor/resolvelib/__init__.py,sha256=T0LcAr9Sfai6ZXanpwavdHEea-Anw2QZGx16zd3lMKY,541 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-313.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-313.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-313.pyc,, +pip/_vendor/resolvelib/providers.py,sha256=pIWJbIdJJ9GFtNbtwTH0Ia43Vj6hYCEJj2DOLue15FM,8914 +pip/_vendor/resolvelib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/reporters.py,sha256=pNJf4nFxLpAeKxlBUi2GEj0a2Ij1nikY0UabTKXesT4,2037 +pip/_vendor/resolvelib/resolvers/__init__.py,sha256=728M3EvmnPbVXS7ExXlv2kMu6b7wEsoPutEfl-uVk_I,640 +pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-313.pyc,, +pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-313.pyc,, +pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-313.pyc,, +pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-313.pyc,, +pip/_vendor/resolvelib/resolvers/abstract.py,sha256=jZOBVigE4PUub9i3F-bTvBwaIXX8S9EU3CGASBvFqEU,1558 +pip/_vendor/resolvelib/resolvers/criterion.py,sha256=lcmZGv5sKHOnFD_RzZwvlGSj19MeA-5rCMpdf2Sgw7Y,1768 +pip/_vendor/resolvelib/resolvers/exceptions.py,sha256=ln_jaQtgLlRUSFY627yiHG2gD7AgaXzRKaElFVh7fDQ,1768 +pip/_vendor/resolvelib/resolvers/resolution.py,sha256=qU64VKtN-HxoFynmnI6oP8aMPzaiKZtb_1hASH3HTHI,23994 +pip/_vendor/resolvelib/structs.py,sha256=pu-EJiR2IBITr2SQeNPRa0rXhjlStfmO_GEgAhr3004,6420 +pip/_vendor/rich/__init__.py,sha256=dRxjIL-SbFVY0q3IjSMrfgBTHrm1LZDgLOygVBwiYZc,6090 +pip/_vendor/rich/__main__.py,sha256=e_aVC-tDzarWQW9SuZMuCgBr6ODV_iDNV2Wh2xkxOlw,7896 +pip/_vendor/rich/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/__main__.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_cell_widths.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_emoji_codes.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_emoji_replace.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_export_format.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_extension.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_fileno.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_inspect.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_log_render.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_loop.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_null_file.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_palettes.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_pick.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_ratio.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_spinners.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_stack.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_timer.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_win32_console.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_windows.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_windows_renderer.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/_wrap.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/abc.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/align.cpython-313.pyc,, 
+pip/_vendor/rich/__pycache__/ansi.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/bar.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/box.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/cells.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/color.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/color_triplet.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/columns.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/console.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/constrain.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/containers.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/control.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/default_styles.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/diagnose.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/emoji.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/errors.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/file_proxy.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/filesize.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/highlighter.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/json.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/jupyter.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/layout.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/live.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/live_render.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/logging.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/markup.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/measure.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/padding.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/pager.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/palette.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/panel.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/pretty.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/progress.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/progress_bar.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/prompt.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/protocol.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/region.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/repr.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/rule.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/scope.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/screen.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/segment.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/spinner.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/status.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/style.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/styled.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/syntax.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/table.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/terminal_theme.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/text.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/theme.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/themes.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/traceback.cpython-313.pyc,, +pip/_vendor/rich/__pycache__/tree.cpython-313.pyc,, +pip/_vendor/rich/_cell_widths.py,sha256=fbmeyetEdHjzE_Vx2l1uK7tnPOhMs2X1lJfO3vsKDpA,10209 +pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235 +pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064 +pip/_vendor/rich/_export_format.py,sha256=RI08pSrm5tBSzPMvnbTqbD9WIalaOoN5d4M1RTmLq1Y,2128 +pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265 +pip/_vendor/rich/_fileno.py,sha256=HWZxP5C2ajMbHryvAQZseflVfQoGzsKOHzKGsLD8ynQ,799 
+pip/_vendor/rich/_inspect.py,sha256=ROT0PLC2GMWialWZkqJIjmYq7INRijQQkoSokWTaAiI,9656 +pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225 +pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236 +pip/_vendor/rich/_null_file.py,sha256=ADGKp1yt-k70FMKV6tnqCqecB-rSJzp-WQsD7LPL-kg,1394 +pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063 +pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423 +pip/_vendor/rich/_ratio.py,sha256=IOtl78sQCYZsmHyxhe45krkb68u9xVz7zFsXVJD-b2Y,5325 +pip/_vendor/rich/_spinners.py,sha256=U2r1_g_1zSjsjiUdAESc2iAMc3i4ri_S8PYP6kQ5z1I,19919 +pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351 +pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417 +pip/_vendor/rich/_win32_console.py,sha256=BSaDRIMwBLITn_m0mTRLPqME5q-quGdSMuYMpYeYJwc,22755 +pip/_vendor/rich/_windows.py,sha256=aBwaD_S56SbgopIvayVmpk0Y28uwY2C5Bab1wl3Bp-I,1925 +pip/_vendor/rich/_windows_renderer.py,sha256=t74ZL3xuDCP3nmTp9pH1L5LiI2cakJuQRQleHCJerlk,2783 +pip/_vendor/rich/_wrap.py,sha256=FlSsom5EX0LVkA3KWy34yHnCfLtqX-ZIepXKh-70rpc,3404 +pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890 +pip/_vendor/rich/align.py,sha256=dg-7uY0ukMLLlUEsBDRLva22_sQgIJD4BK0dmZHFHug,10324 +pip/_vendor/rich/ansi.py,sha256=Avs1LHbSdcyOvDOdpELZUoULcBiYewY76eNBp6uFBhs,6921 +pip/_vendor/rich/bar.py,sha256=ldbVHOzKJOnflVNuv1xS7g6dLX2E3wMnXkdPbpzJTcs,3263 +pip/_vendor/rich/box.py,sha256=kmavBc_dn73L_g_8vxWSwYJD2uzBXOUFTtJOfpbczcM,10686 +pip/_vendor/rich/cells.py,sha256=KrQkj5-LghCCpJLSNQIyAZjndc4bnEqOEmi5YuZ9UCY,5130 +pip/_vendor/rich/color.py,sha256=3HSULVDj7qQkXUdFWv78JOiSZzfy5y1nkcYhna296V0,18211 +pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054 +pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131 +pip/_vendor/rich/console.py,sha256=t9azZpmRMVU5cphVBZSShNsmBxd2-IAWcTTlhor-E1s,100849 +pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288 +pip/_vendor/rich/containers.py,sha256=c_56TxcedGYqDepHBMTuZdUIijitAQgnox-Qde0Z1qo,5502 +pip/_vendor/rich/control.py,sha256=EUTSUFLQbxY6Zmo_sdM-5Ls323vIHTBfN8TPulqeHUY,6487 +pip/_vendor/rich/default_styles.py,sha256=khQFqqaoDs3bprMqWpHw8nO5UpG2DN6QtuTd6LzZwYc,8257 +pip/_vendor/rich/diagnose.py,sha256=fJl1TItRn19gGwouqTg-8zPUW3YqQBqGltrfPQs1H9w,1025 +pip/_vendor/rich/emoji.py,sha256=Wd4bQubZdSy6-PyrRQNuMHtn2VkljK9uPZPVlu2cmx0,2367 +pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642 +pip/_vendor/rich/file_proxy.py,sha256=Tl9THMDZ-Pk5Wm8sI1gGg_U5DhusmxD-FZ0fUbcU0W0,1683 +pip/_vendor/rich/filesize.py,sha256=_iz9lIpRgvW7MNSeCZnLg-HwzbP4GETg543WqD8SFs0,2484 +pip/_vendor/rich/highlighter.py,sha256=G_sn-8DKjM1sEjLG_oc4ovkWmiUpWvj8bXi0yed2LnY,9586 +pip/_vendor/rich/json.py,sha256=vVEoKdawoJRjAFayPwXkMBPLy7RSTs-f44wSQDR2nJ0,5031 +pip/_vendor/rich/jupyter.py,sha256=QyoKoE_8IdCbrtiSHp9TsTSNyTHY0FO5whE7jOTd9UE,3252 +pip/_vendor/rich/layout.py,sha256=ajkSFAtEVv9EFTcFs-w4uZfft7nEXhNzL7ZVdgrT5rI,14004 +pip/_vendor/rich/live.py,sha256=tF3ukAAJZ_N2ZbGclqZ-iwLoIoZ8f0HHUz79jAyJqj8,15180 +pip/_vendor/rich/live_render.py,sha256=It_39YdzrBm8o3LL0kaGorPFg-BfZWAcrBjLjFokbx4,3521 +pip/_vendor/rich/logging.py,sha256=5KaPPSMP9FxcXPBcKM4cGd_zW78PMgf-YbMVnvfSw0o,12468 +pip/_vendor/rich/markup.py,sha256=3euGKP5s41NCQwaSjTnJxus5iZMHjxpIM0W6fCxra38,8451 
+pip/_vendor/rich/measure.py,sha256=HmrIJX8sWRTHbgh8MxEay_83VkqNW_70s8aKP5ZcYI8,5305 +pip/_vendor/rich/padding.py,sha256=KVEI3tOwo9sgK1YNSuH__M1_jUWmLZwRVV_KmOtVzyM,4908 +pip/_vendor/rich/pager.py,sha256=SO_ETBFKbg3n_AgOzXm41Sv36YxXAyI3_R-KOY2_uSc,828 +pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396 +pip/_vendor/rich/panel.py,sha256=9sQl00hPIqH5G2gALQo4NepFwpP0k9wT-s_gOms5pIc,11157 +pip/_vendor/rich/pretty.py,sha256=gy3S72u4FRg2ytoo7N1ZDWDIvB4unbzd5iUGdgm-8fc,36391 +pip/_vendor/rich/progress.py,sha256=CUc2lkU-X59mVdGfjMCBkZeiGPL3uxdONjhNJF2T7wY,60408 +pip/_vendor/rich/progress_bar.py,sha256=mZTPpJUwcfcdgQCTTz3kyY-fc79ddLwtx6Ghhxfo064,8162 +pip/_vendor/rich/prompt.py,sha256=l0RhQU-0UVTV9e08xW1BbIj0Jq2IXyChX4lC0lFNzt4,12447 +pip/_vendor/rich/protocol.py,sha256=5hHHDDNHckdk8iWH5zEbi-zuIVSF5hbU2jIo47R7lTE,1391 +pip/_vendor/rich/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166 +pip/_vendor/rich/repr.py,sha256=5MZJZmONgC6kud-QW-_m1okXwL2aR6u6y-pUcUCJz28,4431 +pip/_vendor/rich/rule.py,sha256=0fNaS_aERa3UMRc3T5WMpN_sumtDxfaor2y3of1ftBk,4602 +pip/_vendor/rich/scope.py,sha256=TMUU8qo17thyqQCPqjDLYpg_UU1k5qVd-WwiJvnJVas,2843 +pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591 +pip/_vendor/rich/segment.py,sha256=otnKeKGEV-WRlQVosfJVeFDcDxAKHpvJ_hLzSu5lumM,24743 +pip/_vendor/rich/spinner.py,sha256=onIhpKlljRHppTZasxO8kXgtYyCHUkpSgKglRJ3o51g,4214 +pip/_vendor/rich/status.py,sha256=kkPph3YeAZBo-X-4wPp8gTqZyU466NLwZBA4PZTTewo,4424 +pip/_vendor/rich/style.py,sha256=xpj4uMBZMtuNuNomfUiamigl3p1sDvTCZwrG1tcTVeg,27059 +pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258 +pip/_vendor/rich/syntax.py,sha256=eDKIRwl--eZ0Lwo2da2RRtfutXGavrJO61Cl5OkS59U,36371 +pip/_vendor/rich/table.py,sha256=ZmT7V7MMCOYKw7TGY9SZLyYDf6JdM-WVf07FdVuVhTI,40049 +pip/_vendor/rich/terminal_theme.py,sha256=1j5-ufJfnvlAo5Qsi_ACZiXDmwMXzqgmFByObT9-yJY,3370 +pip/_vendor/rich/text.py,sha256=AO7JPCz6-gaN1thVLXMBntEmDPVYFgFNG1oM61_sanU,47552 +pip/_vendor/rich/theme.py,sha256=oNyhXhGagtDlbDye3tVu3esWOWk0vNkuxFw-_unlaK0,3771 +pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102 +pip/_vendor/rich/traceback.py,sha256=c0WmB_L04_UfZbLaoH982_U_s7eosxKMUiAVmDPdRYU,35861 +pip/_vendor/rich/tree.py,sha256=yWnQ6rAvRGJ3qZGqBrxS2SW2TKBTNrP0SdY8QxOFPuw,9451 +pip/_vendor/tomli/__init__.py,sha256=PhNw_eyLgdn7McJ6nrAN8yIm3dXC75vr1sVGVVwDSpA,314 +pip/_vendor/tomli/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/tomli/__pycache__/_parser.cpython-313.pyc,, +pip/_vendor/tomli/__pycache__/_re.cpython-313.pyc,, +pip/_vendor/tomli/__pycache__/_types.cpython-313.pyc,, +pip/_vendor/tomli/_parser.py,sha256=9w8LG0jB7fwmZZWB0vVXbeejDHcl4ANIJxB2scEnDlA,25591 +pip/_vendor/tomli/_re.py,sha256=sh4sBDRgO94KJZwNIrgdcyV_qQast50YvzOAUGpRDKA,3171 +pip/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 +pip/_vendor/tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 +pip/_vendor/tomli_w/__init__.py,sha256=0F8yDtXx3Uunhm874KrAcP76srsM98y7WyHQwCulZbo,169 +pip/_vendor/tomli_w/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/tomli_w/__pycache__/_writer.cpython-313.pyc,, +pip/_vendor/tomli_w/_writer.py,sha256=dsifFS2xYf1i76mmRyfz9y125xC7Z_HQ845ZKhJsYXs,6961 +pip/_vendor/tomli_w/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 
+pip/_vendor/truststore/__init__.py,sha256=2wRSVijjRzPLVXUzWqvdZLNsEOhDfopKLd2EKAYLwKU,1320 +pip/_vendor/truststore/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/truststore/__pycache__/_api.cpython-313.pyc,, +pip/_vendor/truststore/__pycache__/_macos.cpython-313.pyc,, +pip/_vendor/truststore/__pycache__/_openssl.cpython-313.pyc,, +pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-313.pyc,, +pip/_vendor/truststore/__pycache__/_windows.cpython-313.pyc,, +pip/_vendor/truststore/_api.py,sha256=af8gEZG_vhsudia9vz4es3Vh8xAqhzQz4Cbjs6_rxus,11234 +pip/_vendor/truststore/_macos.py,sha256=nZlLkOmszUE0g6ryRwBVGY5COzPyudcsiJtDWarM5LQ,20503 +pip/_vendor/truststore/_openssl.py,sha256=LLUZ7ZGaio-i5dpKKjKCSeSufmn6T8pi9lDcFnvSyq0,2324 +pip/_vendor/truststore/_ssl_constants.py,sha256=NUD4fVKdSD02ri7-db0tnO0VqLP9aHuzmStcW7tAl08,1130 +pip/_vendor/truststore/_windows.py,sha256=rAHyKYD8M7t-bXfG8VgOVa3TpfhVhbt4rZQlO45YuP8,17993 +pip/_vendor/truststore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333 +pip/_vendor/urllib3/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/_version.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-313.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-313.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=pyASJJhW7wdOpqJj9QJA8FyGRfr8E8uUUhqUvhF0728,11372 +pip/_vendor/urllib3/_version.py,sha256=t9wGB6ooOTXXgiY66K1m6BZS1CJyXHAU8EoWDTe6Shk,64 +pip/_vendor/urllib3/connection.py,sha256=ttIA909BrbTUzwkqEe_TzZVh4JOOj7g61Ysei2mrwGg,20314 +pip/_vendor/urllib3/connectionpool.py,sha256=e2eiAwNbFNCKxj4bwDKNK-w7HIdSz3OmMxU_TIt-evQ,40408 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-313.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 +pip/_vendor/urllib3/contrib/appengine.py,sha256=VR68eAVE137lxTgjBDwCna5UiBZTOKa01Aj_-5BaCz4,11036 
+pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=hDJh4MhyY_p-oKlFcYcQaVQRDv6GMmBGuW9yjxyeejM,17081 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=Fef1IIUUFHqpevzXiDPbIGkDKchY2FVKeVeLGR1Qq3g,34446 +pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 +pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 +pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-313.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-313.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-313.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +pip/_vendor/urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343 +pip/_vendor/urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665 +pip/_vendor/urllib3/poolmanager.py,sha256=aWyhXRtNO4JUnCSVVqKTKQd8EXTvUm1VN9pgs2bcONo,19990 +pip/_vendor/urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691 +pip/_vendor/urllib3/response.py,sha256=fmDJAFkG71uFTn-sVSTh2Iw0WmcXQYqkbRjihvwBjU8,30641 +pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/proxy.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-313.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-313.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 +pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 +pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +pip/_vendor/urllib3/util/request.py,sha256=C0OUt2tcU6LRiQJ7YYNP9GvPrSvl7ziIBekQ-5nlBZk,3997 +pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +pip/_vendor/urllib3/util/retry.py,sha256=6ENvOZ8PBDzh8kgixpql9lIrb2dxH-k7ZmBanJF2Ng4,22050 +pip/_vendor/urllib3/util/ssl_.py,sha256=QDuuTxPSCj1rYtZ4xpD7Ux-r20TD50aHyqKyhQ7Bq4A,17460 +pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758 
+pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 +pip/_vendor/urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168 +pip/_vendor/urllib3/util/url.py,sha256=lCAE7M5myA8EDdW0sJuyyZhVB9K_j38ljWhHAnFaWoE,14296 +pip/_vendor/urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403 +pip/_vendor/vendor.txt,sha256=fawq8T1XFfBhs4rjjSl4fUA3Px9P2mtG2evqqPyhbhc,343 +pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286 diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/REQUESTED b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/WHEEL b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..370ec5f1aa3524bfd31a19fd21e907335f48a7f6 --- /dev/null +++ b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/entry_points.txt b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..25fcf7e2cdf0943ad5c074d4777c721cd3340986 --- /dev/null +++ b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +pip = pip._internal.cli.main:main +pip3 = pip._internal.cli.main:main diff --git a/venv/lib/python3.13/site-packages/pip-25.2.dist-info/top_level.txt b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/pip-25.2.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/METADATA b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..702ee6da5865b50b297705b077ea7d5904fead58 --- /dev/null +++ b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/METADATA @@ -0,0 +1,59 @@ +Metadata-Version: 2.4 +Name: PyYAML +Version: 6.0.3 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Download-URL: https://pypi.org/project/PyYAML/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS 
Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.8 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: download-url +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: platform +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence. diff --git a/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/RECORD b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..77d42b242869584c067a6c3ef67dae9d56dfdbcf --- /dev/null +++ b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/RECORD @@ -0,0 +1,43 @@ +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +_yaml/__pycache__/__init__.cpython-313.pyc,, +pyyaml-6.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyyaml-6.0.3.dist-info/METADATA,sha256=A8O0Fe040J-u3Ek2DpMHabQMWPaFhebeAOLkkWqFjTQ,2351 +pyyaml-6.0.3.dist-info/RECORD,, +pyyaml-6.0.3.dist-info/WHEEL,sha256=2iHh9e2o6T3nHtu_NVT7Cs7pebIqF94rZK8zrQfgoJI,190 +pyyaml-6.0.3.dist-info/licenses/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +pyyaml-6.0.3.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +yaml/__init__.py,sha256=sZ38wzPWp139cwc5ARZFByUvJxtB07X32FUQAzoFR6c,12311 +yaml/__pycache__/__init__.cpython-313.pyc,, +yaml/__pycache__/composer.cpython-313.pyc,, +yaml/__pycache__/constructor.cpython-313.pyc,, +yaml/__pycache__/cyaml.cpython-313.pyc,, +yaml/__pycache__/dumper.cpython-313.pyc,, +yaml/__pycache__/emitter.cpython-313.pyc,, +yaml/__pycache__/error.cpython-313.pyc,, +yaml/__pycache__/events.cpython-313.pyc,, +yaml/__pycache__/loader.cpython-313.pyc,, +yaml/__pycache__/nodes.cpython-313.pyc,, +yaml/__pycache__/parser.cpython-313.pyc,, +yaml/__pycache__/reader.cpython-313.pyc,, +yaml/__pycache__/representer.cpython-313.pyc,, +yaml/__pycache__/resolver.cpython-313.pyc,, +yaml/__pycache__/scanner.cpython-313.pyc,, +yaml/__pycache__/serializer.cpython-313.pyc,, +yaml/__pycache__/tokens.cpython-313.pyc,, +yaml/_yaml.cpython-313-x86_64-linux-gnu.so,sha256=P5FgHX2lDz0VIegH8EggvhmMyu3CqJCU1pAxz4ko2K8,2653048 
+yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
+yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
+yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
+yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
+yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
+yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
+yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
+yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
+yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
+yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
+yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
+yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
+yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
+yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
+yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
+yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
diff --git a/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/WHEEL b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..539ecdef0c43b82970407bf0d2e868801a0862df
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/WHEEL
@@ -0,0 +1,7 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp313-cp313-manylinux_2_17_x86_64
+Tag: cp313-cp313-manylinux2014_x86_64
+Tag: cp313-cp313-manylinux_2_28_x86_64
+
diff --git a/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/top_level.txt b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e6475e911f628412049bc4090d86f23ac403adde
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/pyyaml-6.0.3.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+_yaml
+yaml
diff --git a/venv/lib/python3.13/site-packages/requests/__init__.py b/venv/lib/python3.13/site-packages/requests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..051cda1340effaa0706b46dd68ac002ceda3d45c
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/requests/__init__.py
@@ -0,0 +1,184 @@
+#   __
+#  /__)  _  _  _   _ _/   _
+# / (   (- (/ (/ (- _)  / _)
+#          /
+
+"""
+Requests HTTP Library
+~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings.
+Basic GET usage:
+
+   >>> import requests
+   >>> r = requests.get('https://www.python.org')
+   >>> r.status_code
+   200
+   >>> b'Python is a programming language' in r.content
+   True
+
+... or POST:
+
+   >>> payload = dict(key1='value1', key2='value2')
+   >>> r = requests.post('https://httpbin.org/post', data=payload)
+   >>> print(r.text)
+   {
+     ...
+     "form": {
+       "key1": "value1",
+       "key2": "value2"
+     },
+     ...
+   }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <https://requests.readthedocs.io>.
+
+:copyright: (c) 2017 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
+"""
+
+import warnings
+
+import urllib3
+
+from .exceptions import RequestsDependencyWarning
+
+try:
+    from charset_normalizer import __version__ as charset_normalizer_version
+except ImportError:
+    charset_normalizer_version = None
+
+try:
+    from chardet import __version__ as chardet_version
+except ImportError:
+    chardet_version = None
+
+
+def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
+    urllib3_version = urllib3_version.split(".")
+    assert urllib3_version != ["dev"]  # Verify urllib3 isn't installed from git.
+
+    # Sometimes, urllib3 only reports its version as 16.1.
+    if len(urllib3_version) == 2:
+        urllib3_version.append("0")
+
+    # Check urllib3 for compatibility.
+    major, minor, patch = urllib3_version  # noqa: F811
+    major, minor, patch = int(major), int(minor), int(patch)
+    # urllib3 >= 1.21.1
+    assert major >= 1
+    if major == 1:
+        assert minor >= 21
+
+    # Check charset_normalizer for compatibility.
+    if chardet_version:
+        major, minor, patch = chardet_version.split(".")[:3]
+        major, minor, patch = int(major), int(minor), int(patch)
+        # chardet_version >= 3.0.2, < 6.0.0
+        assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
+    elif charset_normalizer_version:
+        major, minor, patch = charset_normalizer_version.split(".")[:3]
+        major, minor, patch = int(major), int(minor), int(patch)
+        # charset_normalizer >= 2.0.0 < 4.0.0
+        assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
+    else:
+        warnings.warn(
+            "Unable to find acceptable character detection dependency "
+            "(chardet or charset_normalizer).",
+            RequestsDependencyWarning,
+        )
+
+
+def _check_cryptography(cryptography_version):
+    # cryptography < 1.3.4
+    try:
+        cryptography_version = list(map(int, cryptography_version.split(".")))
+    except ValueError:
+        return
+
+    if cryptography_version < [1, 3, 4]:
+        warning = "Old version of cryptography ({}) may cause slowdown.".format(
+            cryptography_version
+        )
+        warnings.warn(warning, RequestsDependencyWarning)
+
+
+# Check imported dependencies for compatibility.
+try:
+    check_compatibility(
+        urllib3.__version__, chardet_version, charset_normalizer_version
+    )
+except (AssertionError, ValueError):
+    warnings.warn(
+        "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+        "version!".format(
+            urllib3.__version__, chardet_version, charset_normalizer_version
+        ),
+        RequestsDependencyWarning,
+    )
+
+# Attempt to enable urllib3's fallback for SNI support
+# if the standard library doesn't support SNI or the
+# 'ssl' library isn't available.
+try:
+    try:
+        import ssl
+    except ImportError:
+        ssl = None
+
+    if not getattr(ssl, "HAS_SNI", False):
+        from urllib3.contrib import pyopenssl
+
+        pyopenssl.inject_into_urllib3()
+
+        # Check cryptography version
+        from cryptography import __version__ as cryptography_version
+
+        _check_cryptography(cryptography_version)
+except ImportError:
+    pass
+
+# urllib3's DependencyWarnings should be silenced.
+from urllib3.exceptions import DependencyWarning
+
+warnings.simplefilter("ignore", DependencyWarning)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+from logging import NullHandler
+
+from . import packages, utils
+from .__version__ import (
+    __author__,
+    __author_email__,
+    __build__,
+    __cake__,
+    __copyright__,
+    __description__,
+    __license__,
+    __title__,
+    __url__,
+    __version__,
+)
+from .api import delete, get, head, options, patch, post, put, request
+from .exceptions import (
+    ConnectionError,
+    ConnectTimeout,
+    FileModeWarning,
+    HTTPError,
+    JSONDecodeError,
+    ReadTimeout,
+    RequestException,
+    Timeout,
+    TooManyRedirects,
+    URLRequired,
+)
+from .models import PreparedRequest, Request, Response
+from .sessions import Session, session
+from .status_codes import codes
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# FileModeWarnings go off per the default.
+warnings.simplefilter("default", FileModeWarning, append=True)
diff --git a/venv/lib/python3.13/site-packages/requests/__version__.py b/venv/lib/python3.13/site-packages/requests/__version__.py
new file mode 100644
index 0000000000000000000000000000000000000000..effdd98cf159371a36cb3ffaa8ad3e31ee3885f2
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/requests/__version__.py
@@ -0,0 +1,14 @@
+# .-. .-. .-. . . .-. .-. .-. .-.
+# |(  |-  |.| | | |-  `-. |  `-.
+# ' ' `-' `-`.`-' `-' `-' '  `-'
+
+__title__ = "requests"
+__description__ = "Python HTTP for Humans."
+__url__ = "https://requests.readthedocs.io"
+__version__ = "2.32.5"
+__build__ = 0x023205
+__author__ = "Kenneth Reitz"
+__author_email__ = "me@kennethreitz.org"
+__license__ = "Apache-2.0"
+__copyright__ = "Copyright Kenneth Reitz"
+__cake__ = "\u2728 \U0001f370 \u2728"
diff --git a/venv/lib/python3.13/site-packages/requests/_internal_utils.py b/venv/lib/python3.13/site-packages/requests/_internal_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2cf635e2937ee9b123a1498c5c5f723a6e20084
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/requests/_internal_utils.py
@@ -0,0 +1,50 @@
+"""
+requests._internal_utils
+~~~~~~~~~~~~~~
+
+Provides utility functions that are consumed internally by Requests
+which depend on extremely few external helpers (such as compat)
+"""
+import re
+
+from .compat import builtin_str
+
+_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
+_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
+
+_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
+_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
+HEADER_VALIDATORS = {
+    bytes: _HEADER_VALIDATORS_BYTE,
+    str: _HEADER_VALIDATORS_STR,
+}
+
+
+def to_native_string(string, encoding="ascii"):
+    """Given a string object, regardless of type, returns a representation of
+    that string in the native string type, encoding and decoding where
+    necessary. This assumes ASCII unless told otherwise.
+    """
+    if isinstance(string, builtin_str):
+        out = string
+    else:
+        out = string.decode(encoding)
+
+    return out
+
+
+def unicode_is_ascii(u_string):
+    """Determine if unicode string only contains ASCII characters.
+
+    :param str u_string: unicode string to check. Must be unicode
+        and not Python 2 `str`.
+    :rtype: bool
+    """
+    assert isinstance(u_string, str)
+    try:
+        u_string.encode("ascii")
+        return True
+    except UnicodeEncodeError:
+        return False
diff --git a/venv/lib/python3.13/site-packages/requests/adapters.py b/venv/lib/python3.13/site-packages/requests/adapters.py
new file mode 100644
index 0000000000000000000000000000000000000000..670c92767c5420a21538688132ce1f4f45e45873
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/requests/adapters.py
@@ -0,0 +1,696 @@
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
+import os.path
+import socket  # noqa: F401
+import typing
+import warnings
+
+from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
+from urllib3.exceptions import HTTPError as _HTTPError
+from urllib3.exceptions import InvalidHeader as _InvalidHeader
+from urllib3.exceptions import (
+    LocationValueError,
+    MaxRetryError,
+    NewConnectionError,
+    ProtocolError,
+)
+from urllib3.exceptions import ProxyError as _ProxyError
+from urllib3.exceptions import ReadTimeoutError, ResponseError
+from urllib3.exceptions import SSLError as _SSLError
+from urllib3.poolmanager import PoolManager, proxy_from_url
+from urllib3.util import Timeout as TimeoutSauce
+from urllib3.util import parse_url
+from urllib3.util.retry import Retry
+
+from .auth import _basic_auth_str
+from .compat import basestring, urlparse
+from .cookies import extract_cookies_to_jar
+from .exceptions import (
+    ConnectionError,
+    ConnectTimeout,
+    InvalidHeader,
+    InvalidProxyURL,
+    InvalidSchema,
+    InvalidURL,
+    ProxyError,
+    ReadTimeout,
+    RetryError,
+    SSLError,
+)
+from .models import Response
+from .structures import CaseInsensitiveDict
+from .utils import (
+    DEFAULT_CA_BUNDLE_PATH,
+    extract_zipped_paths,
+    get_auth_from_url,
+    get_encoding_from_headers,
+    prepend_scheme_if_needed,
+    select_proxy,
+    urldefragauth,
+)
+
+try:
+    from urllib3.contrib.socks import SOCKSProxyManager
+except ImportError:
+
+    def SOCKSProxyManager(*args, **kwargs):
+        raise InvalidSchema("Missing dependencies for SOCKS support.")
+
+
+if typing.TYPE_CHECKING:
+    from .models import PreparedRequest
+
+
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
+
+
+def _urllib3_request_context(
+    request: "PreparedRequest",
+    verify: "bool | str | None",
+    client_cert: "typing.Tuple[str, str] | str | None",
+    poolmanager: "PoolManager",
+) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
+    host_params = {}
+    pool_kwargs = {}
+    parsed_request_url = urlparse(request.url)
+    scheme = parsed_request_url.scheme.lower()
+    port = parsed_request_url.port
+
+    cert_reqs = "CERT_REQUIRED"
+    if verify is False:
+        cert_reqs = "CERT_NONE"
+    elif isinstance(verify, str):
+        if not os.path.isdir(verify):
+            pool_kwargs["ca_certs"] = verify
+        else:
+            pool_kwargs["ca_cert_dir"] = verify
+    pool_kwargs["cert_reqs"] = cert_reqs
+    if client_cert is not None:
+        if isinstance(client_cert, tuple) and len(client_cert) == 2:
+            pool_kwargs["cert_file"] = client_cert[0]
+            pool_kwargs["key_file"] = client_cert[1]
+        else:
+            # According to our docs, we allow users to specify just the client
+            # cert path
+            pool_kwargs["cert_file"] = client_cert
+    host_params = {
+        "scheme": scheme,
+        "host": parsed_request_url.hostname,
+        "port": port,
+    }
+    return host_params, pool_kwargs
+
+
+class BaseAdapter:
+    """The Base Transport Adapter"""
+
+    def __init__(self):
+        super().__init__()
+
+    def send(
+        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+    ):
+        """Sends PreparedRequest object. Returns Response object.
+
+        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+        :param stream: (optional) Whether to stream the request content.
+        :param timeout: (optional) How long to wait for the server to send
+            data before giving up, as a float, or a :ref:`(connect timeout,
+            read timeout) <timeouts>` tuple.
+        :type timeout: float or tuple
+        :param verify: (optional) Either a boolean, in which case it controls whether we verify
+            the server's TLS certificate, or a string, in which case it must be a path
+            to a CA bundle to use
+        :param cert: (optional) Any user-provided SSL certificate to be trusted.
+        :param proxies: (optional) The proxies dictionary to apply to the request.
+        """
+        raise NotImplementedError
+
+    def close(self):
+        """Cleans up adapter specific items."""
+        raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+    """The built-in HTTP Adapter for urllib3.
+
+    Provides a general-case interface for Requests sessions to contact HTTP and
+    HTTPS urls by implementing the Transport Adapter interface. This class will
+    usually be created by the :class:`Session <requests.Session>` class under the
+    covers.
+
+    :param pool_connections: The number of urllib3 connection pools to cache.
+    :param pool_maxsize: The maximum number of connections to save in the pool.
+    :param max_retries: The maximum number of retries each connection
+        should attempt. Note, this applies only to failed DNS lookups, socket
+        connections and connection timeouts, never to requests where data has
+        made it to the server. By default, Requests does not retry failed
+        connections. If you need granular control over the conditions under
+        which we retry a request, import urllib3's ``Retry`` class and pass
+        that instead.
+    :param pool_block: Whether the connection pool should block for connections.
+
+    Usage::
+
+      >>> import requests
+      >>> s = requests.Session()
+      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+      >>> s.mount('http://', a)
+    """
+
+    __attrs__ = [
+        "max_retries",
+        "config",
+        "_pool_connections",
+        "_pool_maxsize",
+        "_pool_block",
+    ]
+
+    def __init__(
+        self,
+        pool_connections=DEFAULT_POOLSIZE,
+        pool_maxsize=DEFAULT_POOLSIZE,
+        max_retries=DEFAULT_RETRIES,
+        pool_block=DEFAULT_POOLBLOCK,
+    ):
+        if max_retries == DEFAULT_RETRIES:
+            self.max_retries = Retry(0, read=False)
+        else:
+            self.max_retries = Retry.from_int(max_retries)
+        self.config = {}
+        self.proxy_manager = {}
+
+        super().__init__()
+
+        self._pool_connections = pool_connections
+        self._pool_maxsize = pool_maxsize
+        self._pool_block = pool_block
+
+        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
+    def __getstate__(self):
+        return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+    def __setstate__(self, state):
+        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+        # self.poolmanager uses a lambda function, which isn't pickleable.
+        self.proxy_manager = {}
+        self.config = {}
+
+        for attr, value in state.items():
+            setattr(self, attr, value)
+
+        self.init_poolmanager(
+            self._pool_connections, self._pool_maxsize, block=self._pool_block
+        )
+
+    def init_poolmanager(
+        self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
+    ):
+        """Initializes a urllib3 PoolManager.
+
+        This method should not be called from user code, and is only
+        exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param connections: The number of urllib3 connection pools to cache.
+        :param maxsize: The maximum number of connections to save in the pool.
+        :param block: Block when no free connections are available.
+        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+        """
+        # save these values for pickling
+        self._pool_connections = connections
+        self._pool_maxsize = maxsize
+        self._pool_block = block
+
+        self.poolmanager = PoolManager(
+            num_pools=connections,
+            maxsize=maxsize,
+            block=block,
+            **pool_kwargs,
+        )
+
+    def proxy_manager_for(self, proxy, **proxy_kwargs):
+        """Return urllib3 ProxyManager for the given proxy.
+
+        This method should not be called from user code, and is only
+        exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param proxy: The proxy to return a urllib3 ProxyManager for.
+        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+        :returns: ProxyManager
+        :rtype: urllib3.ProxyManager
+        """
+        if proxy in self.proxy_manager:
+            manager = self.proxy_manager[proxy]
+        elif proxy.lower().startswith("socks"):
+            username, password = get_auth_from_url(proxy)
+            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
+                proxy,
+                username=username,
+                password=password,
+                num_pools=self._pool_connections,
+                maxsize=self._pool_maxsize,
+                block=self._pool_block,
+                **proxy_kwargs,
+            )
+        else:
+            proxy_headers = self.proxy_headers(proxy)
+            manager = self.proxy_manager[proxy] = proxy_from_url(
+                proxy,
+                proxy_headers=proxy_headers,
+                num_pools=self._pool_connections,
+                maxsize=self._pool_maxsize,
+                block=self._pool_block,
+                **proxy_kwargs,
+            )
+
+        return manager
+
+    def cert_verify(self, conn, url, verify, cert):
+        """Verify a SSL certificate. This method should not be called from user
+        code, and is only exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param conn: The urllib3 connection object associated with the cert.
+        :param url: The requested URL.
+        :param verify: Either a boolean, in which case it controls whether we verify
+            the server's TLS certificate, or a string, in which case it must be a path
+            to a CA bundle to use
+        :param cert: The SSL certificate to verify.
+        """
+        if url.lower().startswith("https") and verify:
+            cert_loc = None
+
+            # Allow self-specified cert location.
+            if verify is not True:
+                cert_loc = verify
+
+            if not cert_loc:
+                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
+
+            if not cert_loc or not os.path.exists(cert_loc):
+                raise OSError(
+                    f"Could not find a suitable TLS CA certificate bundle, "
+                    f"invalid path: {cert_loc}"
+                )
+
+            conn.cert_reqs = "CERT_REQUIRED"
+
+            if not os.path.isdir(cert_loc):
+                conn.ca_certs = cert_loc
+            else:
+                conn.ca_cert_dir = cert_loc
+        else:
+            conn.cert_reqs = "CERT_NONE"
+            conn.ca_certs = None
+            conn.ca_cert_dir = None
+
+        if cert:
+            if not isinstance(cert, basestring):
+                conn.cert_file = cert[0]
+                conn.key_file = cert[1]
+            else:
+                conn.cert_file = cert
+                conn.key_file = None
+            if conn.cert_file and not os.path.exists(conn.cert_file):
+                raise OSError(
+                    f"Could not find the TLS certificate file, "
+                    f"invalid path: {conn.cert_file}"
+                )
+            if conn.key_file and not os.path.exists(conn.key_file):
+                raise OSError(
+                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
+                )
+
+    def build_response(self, req, resp):
+        """Builds a :class:`Response <requests.Response>` object from a urllib3
+        response. This should not be called from user code, and is only exposed
+        for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
+
+        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+        :param resp: The urllib3 response object.
+        :rtype: requests.Response
+        """
+        response = Response()
+
+        # Fallback to None if there's no status_code, for whatever reason.
+        response.status_code = getattr(resp, "status", None)
+
+        # Make headers case-insensitive.
+        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
+
+        # Set encoding.
+        response.encoding = get_encoding_from_headers(response.headers)
+        response.raw = resp
+        response.reason = response.raw.reason
+
+        if isinstance(req.url, bytes):
+            response.url = req.url.decode("utf-8")
+        else:
+            response.url = req.url
+
+        # Add new cookies from the server.
+        extract_cookies_to_jar(response.cookies, req, resp)
+
+        # Give the Response some context.
+        response.request = req
+        response.connection = self
+
+        return response
+
+    def build_connection_pool_key_attributes(self, request, verify, cert=None):
+        """Build the PoolKey attributes used by urllib3 to return a connection.
+
+        This looks at the PreparedRequest, the user-specified verify value,
+        and the value of the cert parameter to determine what PoolKey values
+        to use to select a connection from a given urllib3 Connection Pool.
+
+        The SSL related pool key arguments are not consistently set. As of
+        this writing, use the following to determine what keys may be in that
+        dictionary:
+
+        * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
+          default Requests SSL Context
+        * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
+          ``"cert_reqs"`` will be set
+        * If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
+          ``"ca_certs"`` will be set if the string is not a directory recognized
+          by :py:func:`os.path.isdir`, otherwise ``"ca_cert_dir"`` will be
+          set.
+        * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
+          ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
+          be present
+
+        To override these settings, one may subclass this class, call this
+        method and use the above logic to change parameters as desired. For
+        example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
+        must both set ``"ssl_context"`` and based on what else they require,
+        alter the other keys to ensure the desired behaviour.
+
+        :param request:
+            The PreparedReqest being sent over the connection.
+        :type request:
+            :class:`~requests.models.PreparedRequest`
+        :param verify:
+            Either a boolean, in which case it controls whether
+            we verify the server's TLS certificate, or a string, in which case it
+            must be a path to a CA bundle to use.
+        :param cert:
+            (optional) Any user-provided SSL certificate for client
+            authentication (a.k.a., mTLS). This may be a string (i.e., just
+            the path to a file which holds both certificate and key) or a
+            tuple of length 2 with the certificate file path and key file
+            path.
+        :returns:
+            A tuple of two dictionaries. The first is the "host parameters"
+            portion of the Pool Key including scheme, hostname, and port. The
+            second is a dictionary of SSLContext related parameters.
+        """
+        return _urllib3_request_context(request, verify, cert, self.poolmanager)
+
+    def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
+        """Returns a urllib3 connection for the given request and TLS settings.
+        This should not be called from user code, and is only exposed for use
+        when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param request:
+            The :class:`PreparedRequest <PreparedRequest>` object to be sent
+            over the connection.
+        :param verify:
+            Either a boolean, in which case it controls whether we verify the
+            server's TLS certificate, or a string, in which case it must be a
+            path to a CA bundle to use.
+        :param proxies:
+            (optional) The proxies dictionary to apply to the request.
+        :param cert:
+            (optional) Any user-provided SSL certificate to be used for client
+            authentication (a.k.a., mTLS).
+        :rtype:
+            urllib3.ConnectionPool
+        """
+        proxy = select_proxy(request.url, proxies)
+        try:
+            host_params, pool_kwargs = self.build_connection_pool_key_attributes(
+                request,
+                verify,
+                cert,
+            )
+        except ValueError as e:
+            raise InvalidURL(e, request=request)
+        if proxy:
+            proxy = prepend_scheme_if_needed(proxy, "http")
+            proxy_url = parse_url(proxy)
+            if not proxy_url.host:
+                raise InvalidProxyURL(
+                    "Please check proxy URL. It is malformed "
+                    "and could be missing the host."
+                )
+            proxy_manager = self.proxy_manager_for(proxy)
+            conn = proxy_manager.connection_from_host(
+                **host_params, pool_kwargs=pool_kwargs
+            )
+        else:
+            # Only scheme should be lower case
+            conn = self.poolmanager.connection_from_host(
+                **host_params, pool_kwargs=pool_kwargs
+            )
+
+        return conn
+
+    def get_connection(self, url, proxies=None):
+        """DEPRECATED: Users should move to `get_connection_with_tls_context`
+        for all subclasses of HTTPAdapter using Requests>=2.32.2.
+
+        Returns a urllib3 connection for the given URL. This should not be
+        called from user code, and is only exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param url: The URL to connect to.
+        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+        :rtype: urllib3.ConnectionPool
+        """
+        warnings.warn(
+            (
+                "`get_connection` has been deprecated in favor of "
+                "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
+                "will need to migrate for Requests>=2.32.2. Please see "
+                "https://github.com/psf/requests/pull/6710 for more details."
+            ),
+            DeprecationWarning,
+        )
+        proxy = select_proxy(url, proxies)
+
+        if proxy:
+            proxy = prepend_scheme_if_needed(proxy, "http")
+            proxy_url = parse_url(proxy)
+            if not proxy_url.host:
+                raise InvalidProxyURL(
+                    "Please check proxy URL. It is malformed "
+                    "and could be missing the host."
+                )
+            proxy_manager = self.proxy_manager_for(proxy)
+            conn = proxy_manager.connection_from_url(url)
+        else:
+            # Only scheme should be lower case
+            parsed = urlparse(url)
+            url = parsed.geturl()
+            conn = self.poolmanager.connection_from_url(url)
+
+        return conn
+
+    def close(self):
+        """Disposes of any internal state.
+
+        Currently, this closes the PoolManager and any active ProxyManager,
+        which closes any pooled connections.
+        """
+        self.poolmanager.clear()
+        for proxy in self.proxy_manager.values():
+            proxy.clear()
+
+    def request_url(self, request, proxies):
+        """Obtain the url to use when making the final request.
+
+        If the message is being sent through a HTTP proxy, the full URL has to
+        be used. Otherwise, we should only use the path portion of the URL.
+
+        This should not be called from user code, and is only exposed for use
+        when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
+        :rtype: str
+        """
+        proxy = select_proxy(request.url, proxies)
+        scheme = urlparse(request.url).scheme
+
+        is_proxied_http_request = proxy and scheme != "https"
+        using_socks_proxy = False
+        if proxy:
+            proxy_scheme = urlparse(proxy).scheme.lower()
+            using_socks_proxy = proxy_scheme.startswith("socks")
+
+        url = request.path_url
+        if url.startswith("//"):  # Don't confuse urllib3
+            url = f"/{url.lstrip('/')}"
+
+        if is_proxied_http_request and not using_socks_proxy:
+            url = urldefragauth(request.url)
+
+        return url
+
+    def add_headers(self, request, **kwargs):
+        """Add any headers needed by the connection. As of v2.0 this does
+        nothing by default, but is left for overriding by users that subclass
+        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        This should not be called from user code, and is only exposed for use
+        when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+        :param kwargs: The keyword arguments from the call to send().
+        """
+        pass
+
+    def proxy_headers(self, proxy):
+        """Returns a dictionary of the headers to add to any request sent
+        through a proxy. This works with urllib3 magic to ensure that they are
+        correctly sent to the proxy, rather than in a tunnelled request if
+        CONNECT is being used.
+
+        This should not be called from user code, and is only exposed for use
+        when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param proxy: The url of the proxy being used for this request.
+        :rtype: dict
+        """
+        headers = {}
+        username, password = get_auth_from_url(proxy)
+
+        if username:
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+
+        return headers
+
+    def send(
+        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+    ):
+        """Sends PreparedRequest object. Returns Response object.
+
+        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+        :param stream: (optional) Whether to stream the request content.
+        :param timeout: (optional) How long to wait for the server to send
+            data before giving up, as a float, or a :ref:`(connect timeout,
+            read timeout) <timeouts>` tuple.
+        :type timeout: float or tuple or urllib3 Timeout object
+        :param verify: (optional) Either a boolean, in which case it controls whether
+            we verify the server's TLS certificate, or a string, in which case it
+            must be a path to a CA bundle to use
+        :param cert: (optional) Any user-provided SSL certificate to be trusted.
+        :param proxies: (optional) The proxies dictionary to apply to the request.
+        :rtype: requests.Response
+        """
+
+        try:
+            conn = self.get_connection_with_tls_context(
+                request, verify, proxies=proxies, cert=cert
+            )
+        except LocationValueError as e:
+            raise InvalidURL(e, request=request)
+
+        self.cert_verify(conn, request.url, verify, cert)
+        url = self.request_url(request, proxies)
+        self.add_headers(
+            request,
+            stream=stream,
+            timeout=timeout,
+            verify=verify,
+            cert=cert,
+            proxies=proxies,
+        )
+
+        chunked = not (request.body is None or "Content-Length" in request.headers)
+
+        if isinstance(timeout, tuple):
+            try:
+                connect, read = timeout
+                timeout = TimeoutSauce(connect=connect, read=read)
+            except ValueError:
+                raise ValueError(
+                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
+                    f"or a single float to set both timeouts to the same value."
+ ) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) + + except (ProtocolError, OSError) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + elif isinstance(e, _InvalidHeader): + raise InvalidHeader(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/venv/lib/python3.13/site-packages/requests/api.py b/venv/lib/python3.13/site-packages/requests/api.py new file mode 100644 index 0000000000000000000000000000000000000000..5960744552e7f8eea815429e7bdad38b0cc2741d --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/api.py @@ -0,0 +1,157 @@ +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. + + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. 
+ :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("get", url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("options", url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("post", url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("put", url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("patch", url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("delete", url, **kwargs) diff --git a/venv/lib/python3.13/site-packages/requests/auth.py b/venv/lib/python3.13/site-packages/requests/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..4a7ce6dc1460e0de8aa0c38ea9123faa69bd5110 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/auth.py @@ -0,0 +1,314 @@ +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import hashlib +import os +import re +import threading +import time +import warnings +from base64 import b64encode + +from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode("latin1") + + if isinstance(password, str): + password = password.encode("latin1") + + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() + ) + + return authstr + + +class AuthBase: + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError("Auth hooks must be callable.") + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, "init"): + self._thread_local.init = True + self._thread_local.last_nonce = "" + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") + hash_utf8 = None + + if algorithm is None: + _algorithm = "MD5" + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + + def md5_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.md5(x).hexdigest() + + hash_utf8 = md5_utf8 + elif _algorithm == "SHA": + + def sha_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha1(x).hexdigest() + + hash_utf8 = sha_utf8 + elif _algorithm == "SHA-256": + + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha256(x).hexdigest() + + hash_utf8 = sha256_utf8 + elif _algorithm == "SHA-512": + + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha512(x).hexdigest() + + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += 
f"?{p_parsed.query}" + + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") + s += os.urandom(8) + + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") + + if not qop: + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) + if opaque: + base += f', opaque="{opaque}"' + if algorithm: + base += f', algorithm="{algorithm}"' + if entdig: + base += f', digest="{entdig}"' + if qop: + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' + + return f"Digest {base}" + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get("www-authenticate", "") + + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: + self._thread_local.num_401_calls += 1 + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
+ self._thread_local.pos = None + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other diff --git a/venv/lib/python3.13/site-packages/requests/certs.py b/venv/lib/python3.13/site-packages/requests/certs.py new file mode 100644 index 0000000000000000000000000000000000000000..be422c3e91e43bacf60ff3302688df0b28742333 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/certs.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from certifi import where + +if __name__ == "__main__": + print(where()) diff --git a/venv/lib/python3.13/site-packages/requests/compat.py b/venv/lib/python3.13/site-packages/requests/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..7f9d754350c9fe28db41e328ea880b9e4b20cc8b --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/compat.py @@ -0,0 +1,106 @@ +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module previously handled import compatibility issues +between Python 2 and Python 3. It remains for backwards +compatibility until the next major version. +""" + +import importlib +import sys + +# ------- +# urllib3 +# ------- +from urllib3 import __version__ as urllib3_version + +# Detect which major version of urllib3 is being used. +try: + is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1 +except (TypeError, AttributeError): + # If we can't discern a version, prefer old functionality. + is_urllib3_1 = True + +# ------------------- +# Character Detection +# ------------------- + + +def _resolve_char_detection(): + """Find supported character detection libraries.""" + chardet = None + for lib in ("chardet", "charset_normalizer"): + if chardet is None: + try: + chardet = importlib.import_module(lib) + except ImportError: + pass + return chardet + + +chardet = _resolve_char_detection() + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = _ver[0] == 2 + +#: Python 3.x? +is_py3 = _ver[0] == 3 + +# json/simplejson module import resolution +has_simplejson = False +try: + import simplejson as json + + has_simplejson = True +except ImportError: + import json + +if has_simplejson: + from simplejson import JSONDecodeError +else: + from json import JSONDecodeError + +# Keep OrderedDict for backwards compatibility. 
+from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping +from http import cookiejar as cookielib +from http.cookies import Morsel +from io import StringIO + +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) + +builtin_str = str +str = str +bytes = bytes +basestring = (str, bytes) +numeric_types = (int, float) +integer_types = (int,) diff --git a/venv/lib/python3.13/site-packages/requests/cookies.py b/venv/lib/python3.13/site-packages/requests/cookies.py new file mode 100644 index 0000000000000000000000000000000000000000..f69d0cda9e1c893401015a09f2db2de5a5960fd2 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/cookies.py @@ -0,0 +1,561 @@ +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `http.cookiejar.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import calendar +import copy +import time + +from ._internal_utils import to_native_string +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest: + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `http.cookiejar.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
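+
+ A small sketch of the wrapping this performs (the request is
+ illustrative)::
+
+    >>> prepared = requests.Request("GET", "http://example.com/").prepare()
+    >>> mock = MockRequest(prepared)
+    >>> mock.get_host()
+    'example.com'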
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get("Host"): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers["Host"], encoding="utf-8") + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookiejar has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse: + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `http.cookiejar` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookiejar` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, "_original_response") and response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get("Cookie") + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super().__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super().update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value + + if toReturn: + return toReturn + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop("_cookies_lock") + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if "_cookies_lock" not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, "copy"): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
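+
+ A quick sketch of typical use::
+
+    >>> c = create_cookie("session", "xyz", domain="example.com", secure=True)
+    >>> (c.name, c.domain, c.secure)
+    ('session', 'example.com', True)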
+ """ + result = { + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + + badargs = set(kwargs) - set(result) + if badargs: + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) + + result.update(kwargs) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel["max-age"]: + try: + expires = int(time.time() + int(morsel["max-age"])) + except ValueError: + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + return create_cookie( + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), + discard=False, + domain=morsel["domain"], + expires=expires, + name=morsel.key, + path=morsel["path"], + port=None, + rest={"HttpOnly": morsel["httponly"]}, + rfc2109=False, + secure=bool(morsel["secure"]), + value=morsel.value, + version=morsel["version"] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError("You can only merge into CookieJar") + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/venv/lib/python3.13/site-packages/requests/exceptions.py b/venv/lib/python3.13/site-packages/requests/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..83986b489849131efeb7f286b328961205256fd8 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/exceptions.py @@ -0,0 +1,151 @@ +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
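+
+The classes below form a hierarchy, so broad handlers still catch the more
+specific errors; for example ``ConnectTimeout`` derives from both
+``ConnectionError`` and ``Timeout``::
+
+    >>> issubclass(ConnectTimeout, ConnectionError)
+    True
+    >>> issubclass(ConnectTimeout, Timeout)
+    True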
+""" +from urllib3.exceptions import HTTPError as BaseHTTPError + +from .compat import JSONDecodeError as CompatJSONDecodeError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): + """Couldn't decode the text into json""" + + def __init__(self, *args, **kwargs): + """ + Construct the JSONDecodeError instance first with all + args. Then use it's args to construct the IOError so that + the json specific args aren't used as IOError specific args + and the error message from JSONDecodeError is preserved. + """ + CompatJSONDecodeError.__init__(self, *args) + InvalidJSONError.__init__(self, *self.args, **kwargs) + + def __reduce__(self): + """ + The __reduce__ method called when pickling the object must + be the one from the JSONDecodeError (be it json/simplejson) + as it expects all the arguments for instantiation, not just + one like the IOError, and the MRO would by default call the + __reduce__ method from the IOError due to the inheritance order. + """ + return CompatJSONDecodeError.__reduce__(self) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL scheme (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """The URL scheme provided is either invalid or unsupported.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/venv/lib/python3.13/site-packages/requests/help.py b/venv/lib/python3.13/site-packages/requests/help.py new file mode 100644 index 0000000000000000000000000000000000000000..8fbcd6560a8fe2c8a07e3bd1441a81e0db9cb689 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/help.py @@ -0,0 +1,134 @@ +"""Module containing bug report helper(s).""" + +import json +import platform +import ssl +import sys + +import idna +import urllib3 + +from . import __version__ as requests_version + +try: + import charset_normalizer +except ImportError: + charset_normalizer = None + +try: + import chardet +except ImportError: + chardet = None + +try: + from urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import cryptography + import OpenSSL + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 3.10.3 it will return + {'name': 'CPython', 'version': '3.10.3'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
+ """ + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": + implementation_version = platform.python_version() # Complete Guess + elif implementation == "IronPython": + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = "Unknown" + + return {"name": implementation, "version": implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + "system": platform.system(), + "release": platform.release(), + } + except OSError: + platform_info = { + "system": "Unknown", + "release": "Unknown", + } + + implementation_info = _implementation() + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} + if charset_normalizer: + charset_normalizer_info = {"version": charset_normalizer.__version__} + if chardet: + chardet_info = {"version": chardet.__version__} + + pyopenssl_info = { + "version": None, + "openssl_version": "", + } + if OpenSSL: + pyopenssl_info = { + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", + } + cryptography_info = { + "version": getattr(cryptography, "__version__", ""), + } + idna_info = { + "version": getattr(idna, "__version__", ""), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + + return { + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.13/site-packages/requests/hooks.py b/venv/lib/python3.13/site-packages/requests/hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..d181ba2ec2e55d274897315887b78fbdca757da8 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/hooks.py @@ -0,0 +1,33 @@ +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ["response"] + + +def default_hooks(): + return {event: [] for event in HOOKS} + + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, "__call__"): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/venv/lib/python3.13/site-packages/requests/models.py b/venv/lib/python3.13/site-packages/requests/models.py new file mode 100644 index 0000000000000000000000000000000000000000..c4b25fa0790da44683dab185c89ac08b69fb7419 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/models.py @@ -0,0 +1,1039 @@ +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna # noqa: F401 +from io import UnsupportedOperation + +from urllib3.exceptions import ( + DecodeError, + LocationParseError, + ProtocolError, + ReadTimeoutError, + SSLError, +) +from urllib3.fields import RequestField +from urllib3.filepost import encode_multipart_formdata +from urllib3.util import parse_url + +from ._internal_utils import to_native_string, unicode_is_ascii +from .auth import HTTPBasicAuth +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header +from .exceptions import ( + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) +from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema +from .exceptions import SSLError as RequestsSSLError +from .exceptions import StreamConsumedError +from .hooks import default_hooks +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin: + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = "/" + + url.append(path) + + query = p.query + if query: + url.append("?") + url.append(query) + + return "".join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, "read"): + return data + elif hasattr(data, "__iter__"): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): + vs = [vs] + for v in vs: + if v is not None: + result.append( + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if not files: + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, "__iter__"): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + + for k, v in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, "read"): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin: + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError(f'Unsupported event specified, with event name "{event}"') + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, "__iter__"): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. 
+        If a dictionary or list of tuples ``[(key, value)]`` is provided,
+        form-encoding will take place.
+    :param auth: Auth handler or (user, pass) tuple.
+    :param cookies: dictionary or CookieJar of cookies to attach to this request.
+    :param hooks: dictionary of callback hooks, for internal usage.
+
+    Usage::
+
+      >>> import requests
+      >>> req = requests.Request('GET', 'https://httpbin.org/get')
+      >>> req.prepare()
+      <PreparedRequest [GET]>
+    """
+
+    def __init__(
+        self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None,
+    ):
+        # Default empty dicts for dict params.
+        data = [] if data is None else data
+        files = [] if files is None else files
+        headers = {} if headers is None else headers
+        params = {} if params is None else params
+        hooks = {} if hooks is None else hooks
+
+        self.hooks = default_hooks()
+        for k, v in list(hooks.items()):
+            self.register_hook(event=k, hook=v)
+
+        self.method = method
+        self.url = url
+        self.headers = headers
+        self.files = files
+        self.data = data
+        self.json = json
+        self.params = params
+        self.auth = auth
+        self.cookies = cookies
+
+    def __repr__(self):
+        return f"<Request [{self.method}]>"
+
+    def prepare(self):
+        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
+        p = PreparedRequest()
+        p.prepare(
+            method=self.method,
+            url=self.url,
+            headers=self.headers,
+            files=self.files,
+            data=self.data,
+            json=self.json,
+            params=self.params,
+            auth=self.auth,
+            cookies=self.cookies,
+            hooks=self.hooks,
+        )
+        return p
+
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
+    containing the exact bytes that will be sent to the server.
+
+    Instances are generated from a :class:`Request <Request>` object, and
+    should not be instantiated manually; doing so may produce undesirable
+    effects.
+
+    Usage::
+
+      >>> import requests
+      >>> req = requests.Request('GET', 'https://httpbin.org/get')
+      >>> r = req.prepare()
+      >>> r
+      <PreparedRequest [GET]>
+
+      >>> s = requests.Session()
+      >>> s.send(r)
+      <Response [200]>
+    """
+
+    def __init__(self):
+        #: HTTP verb to send to the server.
+        self.method = None
+        #: HTTP URL to send the request to.
+        self.url = None
+        #: dictionary of HTTP headers.
+        self.headers = None
+        # The `CookieJar` used to create the Cookie header will be stored here
+        # after prepare_cookies is called
+        self._cookies = None
+        #: request body to send to the server.
+        self.body = None
+        #: dictionary of callback hooks, for internal usage.
+        self.hooks = default_hooks()
+        #: integer denoting starting position of a readable file-like body.
+        self._body_position = None
+
+    def prepare(
+        self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None,
+    ):
+        """Prepares the entire request with the given parameters."""
+
+        self.prepare_method(method)
+        self.prepare_url(url, params)
+        self.prepare_headers(headers)
+        self.prepare_cookies(cookies)
+        self.prepare_body(data, files, json)
+        self.prepare_auth(auth, url)
+
+        # Note that prepare_auth must be last to enable authentication schemes
+        # such as OAuth to work on a fully prepared request.
+
+        # This MUST go after prepare_auth.
+        # Authenticators could add a hook.
+        self.prepare_hooks(hooks)
+
+    def __repr__(self):
+        return f"<PreparedRequest [{self.method}]>"
+
+    def copy(self):
+        p = PreparedRequest()
+        p.method = self.method
+        p.url = self.url
+        p.headers = self.headers.copy() if self.headers is not None else None
+        p._cookies = _copy_cookie_jar(self._cookies)
+        p.body = self.body
+        p.hooks = self.hooks
+        p._body_position = self._body_position
+        return p
+
+    def prepare_method(self, method):
+        """Prepares the given HTTP method."""
+        self.method = method
+        if self.method is not None:
+            self.method = to_native_string(self.method.upper())
+
+    @staticmethod
+    def _get_idna_encoded_host(host):
+        import idna
+
+        try:
+            host = idna.encode(host, uts46=True).decode("utf-8")
+        except idna.IDNAError:
+            raise UnicodeError
+        return host
+
+    def prepare_url(self, url, params):
+        """Prepares the given HTTP URL."""
+        #: Accept objects that have string representations.
+        #: We're unable to blindly call unicode/str functions
+        #: as this will include the bytestring indicator (b'')
+        #: on python 3.x.
+        #: https://github.com/psf/requests/pull/2238
+        if isinstance(url, bytes):
+            url = url.decode("utf8")
+        else:
+            url = str(url)
+
+        # Remove leading whitespaces from url
+        url = url.lstrip()
+
+        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
+        # `data` etc to work around exceptions from `url_parse`, which
+        # handles RFC 3986 only.
+        if ":" in url and not url.lower().startswith("http"):
+            self.url = url
+            return
+
+        # Support for unicode domain names and paths.
+        try:
+            scheme, auth, host, port, path, query, fragment = parse_url(url)
+        except LocationParseError as e:
+            raise InvalidURL(*e.args)
+
+        if not scheme:
+            raise MissingSchema(
+                f"Invalid URL {url!r}: No scheme supplied. "
+                f"Perhaps you meant https://{url}?"
+            )
+
+        if not host:
+            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")
+
+        # In general, we want to try IDNA encoding the hostname if the string contains
+        # non-ASCII characters. This allows users to automatically get the correct IDNA
+        # behaviour. For strings containing only ASCII characters, we need to also verify
+        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
+        if not unicode_is_ascii(host):
+            try:
+                host = self._get_idna_encoded_host(host)
+            except UnicodeError:
+                raise InvalidURL("URL has an invalid label.")
+        elif host.startswith(("*", ".")):
+            raise InvalidURL("URL has an invalid label.")
+
+        # Carefully reconstruct the network location
+        netloc = auth or ""
+        if netloc:
+            netloc += "@"
+        netloc += host
+        if port:
+            netloc += f":{port}"
+
+        # Bare domains aren't valid URLs.
+        if not path:
+            path = "/"
+
+        if isinstance(params, (str, bytes)):
+            params = to_native_string(params)
+
+        enc_params = self._encode_params(params)
+        if enc_params:
+            if query:
+                query = f"{query}&{enc_params}"
+            else:
+                query = enc_params
+
+        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
+        self.url = url
+
+    def prepare_headers(self, headers):
+        """Prepares the given HTTP headers."""
+
+        self.headers = CaseInsensitiveDict()
+        if headers:
+            for header in headers.items():
+                # Raise exception on invalid header value.
+                check_header_validity(header)
+                name, value = header
+                self.headers[to_native_string(name)] = value
+
+    def prepare_body(self, data, files, json=None):
+        """Prepares the given HTTP body data."""
+
+        # Check if file, fo, generator, iterator.
+        # If not, run through normal process.
+
+        # Nottin' on you.
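+        # Three branches follow: a JSON payload is serialized to UTF-8 bytes,
+        # a streamed iterable is passed through as-is (sized via Content-Length
+        # when its length is known, chunked otherwise), and plain data/files
+        # are form- or multipart-encoded.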
+ body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = "application/json" + + try: + body = complexjson.dumps(json, allow_nan=False) + except ValueError as ve: + raise InvalidJSONError(ve, request=self) + + if not isinstance(body, bytes): + body = body.encode("utf-8") + + is_stream = all( + [ + hasattr(data, "__iter__"), + not isinstance(data, (basestring, list, tuple, Mapping)), + ] + ) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, "tell", None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError( + "Streamed bodies and files are mutually exclusive." + ) + + if length: + self.headers["Content-Length"] = builtin_str(length) + else: + self.headers["Transfer-Encoding"] = "chunked" + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, "read"): + content_type = None + else: + content_type = "application/x-www-form-urlencoded" + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ("content-type" not in self.headers): + self.headers["Content-Type"] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers["Content-Length"] = builtin_str(length) + elif ( + self.method not in ("GET", "HEAD") + and self.headers.get("Content-Length") is None + ): + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) + self.headers["Content-Length"] = "0" + + def prepare_auth(self, auth, url=""): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. 
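+
+        A short sketch of that one-shot behaviour (hypothetical values)::
+
+            >>> p = PreparedRequest()
+            >>> p.prepare(method='GET', url='https://example.com/', cookies={'k': 'v'})
+            >>> p.headers['Cookie']
+            'k=v'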
+ """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers["Cookie"] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response: + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + "_content", + "status_code", + "headers", + "url", + "history", + "encoding", + "reason", + "cookies", + "elapsed", + "request", + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, "_content_consumed", True) + setattr(self, "raw", None) + + def __repr__(self): + return f"" + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return "location" in self.headers and self.status_code in REDIRECT_STATI + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return "location" in self.headers and self.status_code in ( + codes.moved_permanently, + codes.permanent_redirect, + ) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + if chardet is not None: + return chardet.detect(self.content)["encoding"] + else: + # If no character detection library is available, we'll fall back + # to a standard Python utf-8 str. + return "utf-8" + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, "stream"): + try: + yield from self.raw.stream(chunk_size, decode_content=True) + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + except SSLError as e: + raise RequestsSSLError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError( + f"chunk_size must be an int, it is instead a {type(chunk_size)}." 
+ ) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines( + self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None + ): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content( + chunk_size=chunk_size, decode_unicode=decode_unicode + ): + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + yield from lines + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError("The content for this response was already consumed") + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``charset_normalizer`` or ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return "" + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors="replace") + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors="replace") + + return content + + def json(self, **kwargs): + r"""Decodes the JSON response body (if any) as a Python object. + + This may return a dictionary, list, etc. depending on what is in the response. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises requests.exceptions.JSONDecodeError: If the response body does not + contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using charset_normalizer to make + # a best guess). 
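+            # guess_json_utf inspects a possible byte-order mark and the
+            # pattern of null bytes in the first four bytes to pick among the
+            # UTF-8/16/32 variants.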
+ encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads(self.content.decode(encoding), **kwargs) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + except JSONDecodeError as e: + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + try: + return complexjson.loads(self.text, **kwargs) + except JSONDecodeError as e: + # Catch JSON-related errors and raise as requests.JSONDecodeError + # This aliases json.JSONDecodeError and simplejson.JSONDecodeError + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get("link") + + resolved_links = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get("rel") or link.get("url") + resolved_links[key] = link + + return resolved_links + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = "" + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode("utf-8") + except UnicodeDecodeError: + reason = self.reason.decode("iso-8859-1") + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = ( + f"{self.status_code} Client Error: {reason} for url: {self.url}" + ) + + elif 500 <= self.status_code < 600: + http_error_msg = ( + f"{self.status_code} Server Error: {reason} for url: {self.url}" + ) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, "release_conn", None) + if release_conn is not None: + release_conn() diff --git a/venv/lib/python3.13/site-packages/requests/packages.py b/venv/lib/python3.13/site-packages/requests/packages.py new file mode 100644 index 0000000000000000000000000000000000000000..5ab3d8e250de8475cb22553f564e5444e02c7460 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/packages.py @@ -0,0 +1,23 @@ +import sys + +from .compat import chardet + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. 
:) + +for package in ("urllib3", "idna"): + locals()[package] = __import__(package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == package or mod.startswith(f"{package}."): + sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] + +if chardet is not None: + target = chardet.__name__ + for mod in list(sys.modules): + if mod == target or mod.startswith(f"{target}."): + imported_mod = sys.modules[mod] + sys.modules[f"requests.packages.{mod}"] = imported_mod + mod = mod.replace(target, "chardet") + sys.modules[f"requests.packages.{mod}"] = imported_mod diff --git a/venv/lib/python3.13/site-packages/requests/sessions.py b/venv/lib/python3.13/site-packages/requests/sessions.py new file mode 100644 index 0000000000000000000000000000000000000000..731550de88aceb59747460faf519c2277e300e87 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/sessions.py @@ -0,0 +1,831 @@ +""" +requests.sessions +~~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from collections import OrderedDict +from datetime import timedelta + +from ._internal_utils import to_native_string +from .adapters import HTTPAdapter +from .auth import _basic_auth_str +from .compat import Mapping, cookielib, urljoin, urlparse +from .cookies import ( + RequestsCookieJar, + cookiejar_from_dict, + extract_cookies_to_jar, + merge_cookies, +) +from .exceptions import ( + ChunkedEncodingError, + ContentDecodingError, + InvalidSchema, + TooManyRedirects, +) +from .hooks import default_hooks, dispatch_hook + +# formerly defined here, reexposed here for backward compatibility +from .models import ( # noqa: F401 + DEFAULT_REDIRECT_LIMIT, + REDIRECT_STATI, + PreparedRequest, + Request, +) +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( # noqa: F401 + DEFAULT_PORTS, + default_headers, + get_auth_from_url, + get_environ_proxies, + get_netrc_auth, + requote_uri, + resolve_proxies, + rewind_body, + should_bypass_proxies, + to_key_val_list, +) + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == "win32": + preferred_clock = time.perf_counter +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. 
+ + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. + """ + if session_hooks is None or session_hooks.get("response") == []: + return request_hooks + + if request_hooks is None or request_hooks.get("response") == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin: + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers["location"] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + location = location.encode("latin1") + return to_native_string(location, "utf8") + return None + + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if ( + old_parsed.scheme == "http" + and old_parsed.port in (80, None) + and new_parsed.scheme == "https" + and new_parsed.port in (443, None) + ): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if ( + not changed_scheme + and old_parsed.port in default_port + and new_parsed.port in default_port + ): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + + def resolve_redirects( + self, + resp, + req, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, + yield_requests=False, + **adapter_kwargs, + ): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects( + f"Exceeded {self.max_redirects} redirects.", response=resp + ) + + # Release the connection back into the pool. 
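+            # (This returns the underlying connection to the pool so the next
+            # request in the redirect chain can reuse it.)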
+ resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith("//"): + parsed_rurl = urlparse(resp.url) + url = ":".join([to_native_string(parsed_rurl.scheme), url]) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == "" and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. + if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/psf/requests/issues/1084 + if resp.status_code not in ( + codes.temporary_redirect, + codes.permanent_redirect, + ): + # https://github.com/psf/requests/issues/3490 + purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + headers.pop("Cookie", None) + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = prepared_request._body_position is not None and ( + "Content-Length" in headers or "Transfer-Encoding" in headers + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs, + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if "Authorization" in headers and self.should_strip_auth( + response.request.url, url + ): + # If we get redirected to a new host, we should strip out any + # authentication headers. + del headers["Authorization"] + + # .netrc might have more auth for us on our new host. 
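+        # (Only consulted when trust_env is enabled on this session.)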
+        new_auth = get_netrc_auth(url) if self.trust_env else None
+        if new_auth is not None:
+            prepared_request.prepare_auth(new_auth)
+
+    def rebuild_proxies(self, prepared_request, proxies):
+        """This method re-evaluates the proxy configuration by considering the
+        environment variables. If we are redirected to a URL covered by
+        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+        proxy keys for this URL (in case they were stripped by a previous
+        redirect).
+
+        This method also replaces the Proxy-Authorization header where
+        necessary.
+
+        :rtype: dict
+        """
+        headers = prepared_request.headers
+        scheme = urlparse(prepared_request.url).scheme
+        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
+
+        if "Proxy-Authorization" in headers:
+            del headers["Proxy-Authorization"]
+
+        try:
+            username, password = get_auth_from_url(new_proxies[scheme])
+        except KeyError:
+            username, password = None, None
+
+        # urllib3 handles proxy authorization for us in the standard adapter.
+        # Avoid appending this to TLS tunneled requests where it may be leaked.
+        if not scheme.startswith("https") and username and password:
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+
+        return new_proxies
+
+    def rebuild_method(self, prepared_request, response):
+        """When being redirected we may want to change the method of the request
+        based on certain specs or browser behavior.
+        """
+        method = prepared_request.method
+
+        # https://tools.ietf.org/html/rfc7231#section-6.4.4
+        if response.status_code == codes.see_other and method != "HEAD":
+            method = "GET"
+
+        # Do what the browsers do, despite standards...
+        # First, turn 302s into GETs.
+        if response.status_code == codes.found and method != "HEAD":
+            method = "GET"
+
+        # Second, if a POST is responded to with a 301, turn it into a GET.
+        # This bizarre behaviour is explained in Issue 1704.
+        if response.status_code == codes.moved and method == "POST":
+            method = "GET"
+
+        prepared_request.method = method
+
+
+class Session(SessionRedirectMixin):
+    """A Requests session.
+
+    Provides cookie persistence, connection-pooling, and configuration.
+
+    Basic Usage::
+
+      >>> import requests
+      >>> s = requests.Session()
+      >>> s.get('https://httpbin.org/get')
+      <Response [200]>
+
+    Or as a context manager::
+
+      >>> with requests.Session() as s:
+      ...     s.get('https://httpbin.org/get')
+      <Response [200]>
+    """
+
+    __attrs__ = [
+        "headers",
+        "cookies",
+        "auth",
+        "proxies",
+        "hooks",
+        "params",
+        "verify",
+        "cert",
+        "adapters",
+        "stream",
+        "trust_env",
+        "max_redirects",
+    ]
+
+    def __init__(self):
+        #: A case-insensitive dictionary of headers to be sent on each
+        #: :class:`Request <Request>` sent from this
+        #: :class:`Session <Session>`.
+        self.headers = default_headers()
+
+        #: Default Authentication tuple or object to attach to
+        #: :class:`Request <Request>`.
+        self.auth = None
+
+        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+        #: be used on each :class:`Request <Request>`.
+        self.proxies = {}
+
+        #: Event-handling hooks.
+        self.hooks = default_hooks()
+
+        #: Dictionary of querystring data to attach to each
+        #: :class:`Request <Request>`. The dictionary values may be lists for
+        #: representing multivalued query parameters.
+        self.params = {}
+
+        #: Stream response content default.
+        self.stream = False
+
+        #: SSL Verification default.
+        #: Defaults to `True`, requiring requests to verify the TLS certificate at the
+        #: remote end.
+ #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar `, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount("https://", HTTPAdapter()) + self.mount("http://", HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest ` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request ` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies + ) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting( + request.headers, self.headers, dict_class=CaseInsensitiveDict + ), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + """Constructs a :class:`Request `, prepares it and sends it. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. 
+ :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. + :param hooks: (optional) Dictionary mapping hook name to one event or + list of events, event must be callable. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + "timeout": timeout, + "allow_redirects": allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("GET", url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("OPTIONS", url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return self.request("HEAD", url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. 
+ :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("POST", url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PUT", url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PATCH", url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("DELETE", url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault("stream", self.stream) + kwargs.setdefault("verify", self.verify) + kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: + kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError("You can only send PreparedRequests.") + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop("allow_redirects", True) + stream = kwargs.get("stream") + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook("response", hooks, r, **kwargs) + + # Persist cookies + if r.history: + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Resolve redirects if allowed. + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). 
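+        # resolve_redirects is driven with yield_requests=True here, so it
+        # yields the would-be next PreparedRequest instead of sending it.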
+ if not allow_redirects: + try: + r._next = next( + self.resolve_redirects(r, request, yield_requests=True, **kwargs) + ) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + no_proxy = proxies.get("no_proxy") if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for k, v in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration + # and be compatible with cURL. + if verify is True or verify is None: + verify = ( + os.environ.get("REQUESTS_CA_BUNDLE") + or os.environ.get("CURL_CA_BUNDLE") + or verify + ) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ + for prefix, adapter in self.adapters.items(): + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema(f"No connection adapters were found for {url!r}") + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + + :rtype: Session + """ + return Session() diff --git a/venv/lib/python3.13/site-packages/requests/status_codes.py b/venv/lib/python3.13/site-packages/requests/status_codes.py new file mode 100644 index 0000000000000000000000000000000000000000..c7945a2f06897ed980cc575df2f48d9e6c1a9f7e --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/status_codes.py @@ -0,0 +1,128 @@ +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + # Informational. 
+ 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing", "early-hints"), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), + # Redirection. + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 + # Client Error. + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large", "content_too_large"), + 414: ("request_uri_too_large", "uri_too_long"), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable", "unprocessable_content"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered", "too_early"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), + # Server Error. 
+ 500: ("internal_server_error", "server_error", "/o\\", "✗"), + 501: ("not_implemented",), + 502: ("bad_gateway",), + 503: ("service_unavailable", "unavailable"), + 504: ("gateway_timeout",), + 505: ("http_version_not_supported", "http_version"), + 506: ("variant_also_negotiates",), + 507: ("insufficient_storage",), + 509: ("bandwidth_limit_exceeded", "bandwidth"), + 510: ("not_extended",), + 511: ("network_authentication_required", "network_auth", "network_authentication"), +} + +codes = LookupDict(name="status_codes") + + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(("\\", "/")): + setattr(codes, title.upper(), code) + + def doc(code): + names = ", ".join(f"``{n}``" for n in _codes[code]) + return "* %d: %s" % (code, names) + + global __doc__ + __doc__ = ( + __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) + if __doc__ is not None + else None + ) + + +_init() diff --git a/venv/lib/python3.13/site-packages/requests/structures.py b/venv/lib/python3.13/site-packages/requests/structures.py new file mode 100644 index 0000000000000000000000000000000000000000..188e13e4829591facb23ae0e2eda84b9807cb818 --- /dev/null +++ b/venv/lib/python3.13/site-packages/requests/structures.py @@ -0,0 +1,99 @@ +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
+        self._store[key.lower()] = (key, value)
+
+    def __getitem__(self, key):
+        return self._store[key.lower()][1]
+
+    def __delitem__(self, key):
+        del self._store[key.lower()]
+
+    def __iter__(self):
+        return (casedkey for casedkey, mappedvalue in self._store.values())
+
+    def __len__(self):
+        return len(self._store)
+
+    def lower_items(self):
+        """Like iteritems(), but with all lowercase keys."""
+        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
+
+    def __eq__(self, other):
+        if isinstance(other, Mapping):
+            other = CaseInsensitiveDict(other)
+        else:
+            return NotImplemented
+        # Compare insensitively
+        return dict(self.lower_items()) == dict(other.lower_items())
+
+    # Copy is required
+    def copy(self):
+        return CaseInsensitiveDict(self._store.values())
+
+    def __repr__(self):
+        return str(dict(self.items()))
+
+
+class LookupDict(dict):
+    """Dictionary lookup object."""
+
+    def __init__(self, name=None):
+        self.name = name
+        super().__init__()
+
+    def __repr__(self):
+        return f"<lookup '{self.name}'>"
+
+    def __getitem__(self, key):
+        # We allow fall-through here, so values default to None
+
+        return self.__dict__.get(key, None)
+
+    def get(self, key, default=None):
+        return self.__dict__.get(key, default)
diff --git a/venv/lib/python3.13/site-packages/requests/utils.py b/venv/lib/python3.13/site-packages/requests/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ab55852cc2188c53f08462ec4319c97fa49f04b
--- /dev/null
+++ b/venv/lib/python3.13/site-packages/requests/utils.py
@@ -0,0 +1,1086 @@
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+from collections import OrderedDict
+
+from urllib3.util import make_headers, parse_url
+
+from . import certs
+from .__version__ import __version__
+
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import (  # noqa: F401
+    _HEADER_VALIDATORS_BYTE,
+    _HEADER_VALIDATORS_STR,
+    HEADER_VALIDATORS,
+    to_native_string,
+)
+from .compat import (
+    Mapping,
+    basestring,
+    bytes,
+    getproxies,
+    getproxies_environment,
+    integer_types,
+    is_urllib3_1,
+)
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+    proxy_bypass,
+    proxy_bypass_environment,
+    quote,
+    str,
+    unquote,
+    urlparse,
+    urlunparse,
+)
+from .cookies import cookiejar_from_dict
+from .exceptions import (
+    FileModeWarning,
+    InvalidHeader,
+    InvalidURL,
+    UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
+
+NETRC_FILES = (".netrc", "_netrc")
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+DEFAULT_PORTS = {"http": 80, "https": 443}
+
+# Ensure that ', ' is used to preserve previous delimiter behavior.
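+# (Newer urllib3 releases may join the accepted encodings with a bare ",",
+# so the value is re-split and re-joined with ", " below.)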
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
+
+if sys.platform == "win32":
+    # provide a proxy_bypass version on Windows without DNS lookups
+
+    def proxy_bypass_registry(host):
+        try:
+            import winreg
+        except ImportError:
+            return False
+
+        try:
+            internetSettings = winreg.OpenKey(
+                winreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
+            )
+            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
+            # ProxyOverride is almost always a string
+            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
+        except (OSError, ValueError):
+            return False
+        if not proxyEnable or not proxyOverride:
+            return False
+
+        # make a check value list from the registry entry: replace the
+        # '<local>' string by the localhost entry and the corresponding
+        # canonical entry.
+        proxyOverride = proxyOverride.split(";")
+        # filter out empty strings to avoid re.match return true in the following code.
+        proxyOverride = filter(None, proxyOverride)
+        # now check if we match one of the registry values.
+        for test in proxyOverride:
+            if test == "<local>":
+                if "." not in host:
+                    return True
+            test = test.replace(".", r"\.")  # mask dots
+            test = test.replace("*", r".*")  # change glob sequence
+            test = test.replace("?", r".")  # change glob char
+            if re.match(test, host, re.I):
+                return True
+        return False
+
+    def proxy_bypass(host):  # noqa
+        """Return True, if the host should be bypassed.
+
+        Checks proxy settings gathered from the environment, if specified,
+        or the registry.
+        """
+        if getproxies_environment():
+            return proxy_bypass_environment(host)
+        else:
+            return proxy_bypass_registry(host)
+
+
+def dict_to_sequence(d):
+    """Returns an internal sequence dictionary update."""
+
+    if hasattr(d, "items"):
+        d = d.items()
+
+    return d
+
+
+def super_len(o):
+    total_length = None
+    current_position = 0
+
+    if not is_urllib3_1 and isinstance(o, str):
+        # urllib3 2.x+ treats all strings as utf-8 instead
+        # of latin-1 (iso-8859-1) like http.client.
+        o = o.encode("utf-8")
+
+    if hasattr(o, "__len__"):
+        total_length = len(o)
+
+    elif hasattr(o, "len"):
+        total_length = o.len
+
+    elif hasattr(o, "fileno"):
+        try:
+            fileno = o.fileno()
+        except (io.UnsupportedOperation, AttributeError):
+            # AttributeError is a surprising exception, seeing as how we've just checked
+            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+            # `Tarfile.extractfile()`, per issue 5229.
+            pass
+        else:
+            total_length = os.fstat(fileno).st_size
+
+            # Having used fstat to determine the file length, we need to
+            # confirm that this file was opened up in binary mode.
+            if "b" not in o.mode:
+                warnings.warn(
+                    (
+                        "Requests has determined the content-length for this "
+                        "request using the binary size of the file: however, the "
+                        "file has been opened in text mode (i.e. without the 'b' "
+                        "flag in the mode). This may lead to an incorrect "
+                        "content-length. In Requests 3.0, support will be removed "
+                        "for files in text mode."
+                    ),
+                    FileModeWarning,
+                )
+
+    if hasattr(o, "tell"):
+        try:
+            current_position = o.tell()
+        except OSError:
+            # This can happen in some weird situations, such as when the file
+            # is actually a special file descriptor like stdin. In this
+            # instance, we don't know what the length is, so set it to zero and
+            # let requests chunk it instead.
+ if total_length is not None: + current_position = total_length + else: + if hasattr(o, "seek") and total_length is None: + # StringIO and BytesIO have seek but no usable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except OSError: + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get("NETRC") + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = (f"~/{f}" for f in NETRC_FILES) + + try: + from netrc import NetrcParseError, netrc + + netrc_path = None + + for f in netrc_locations: + loc = os.path.expanduser(f) + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + host = ri.hostname + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = 0 if _netrc[0] else 1 + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, OSError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, "name", None) + if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + if not prefix: + # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), + # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + break + member = "/".join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split("/")[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: + yield tmp_handler + os.replace(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {cookie.name: cookie.value for cookie in cj} + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn( + ( + "In requests 3.0, get_encodings_from_content will be removed. For " + "more information, please see the discussion on issue #2266. 
(This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return ( + charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content) + ) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(";") + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1 :].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get("content-type") + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if "charset" in params: + return params["charset"].strip("'\"") + + if "text" in content_type: + return "ISO-8859-1" + + if "application/json" in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return "utf-8" + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes an iterator.""" + + if r.encoding is None: + yield from iterator + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace") + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b"", final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos : pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors="replace") + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split("%") + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = f"%{parts[i]}" + else: + parts[i] = f"%{parts[i]}" + return "".join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except OSError: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count("/") == 1: + try: + mask = int(string_network.split("/")[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split("/")[0]) + except OSError: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy("no_proxy") + parsed = urlparse(url) + + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += f":{parsed.port}" + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + with set_environ("no_proxy", no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get("all")) + + proxy_keys = [ + urlparts.scheme + "://" + urlparts.hostname, + urlparts.scheme, + "all://" + urlparts.hostname, + "all", + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def resolve_proxies(request, proxies, trust_env=True): + """This method takes proxy information from a request and configuration + input to resolve a mapping of target proxies. This will consider settings + such as NO_PROXY to strip proxy configurations. + + :param request: Request or PreparedRequest + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + :param trust_env: Boolean declaring whether to trust environment configs + + :rtype: dict + """ + proxies = proxies if proxies is not None else {} + url = request.url + scheme = urlparse(url).scheme + no_proxy = proxies.get("no_proxy") + new_proxies = proxies.copy() + + if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy): + environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) + + proxy = environ_proxies.get(scheme, environ_proxies.get("all")) + + if proxy: + new_proxies.setdefault(scheme, proxy) + return new_proxies + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. 
+
+    :rtype: str
+    """
+    return f"{name}/{__version__}"
+
+
+def default_headers():
+    """
+    :rtype: requests.structures.CaseInsensitiveDict
+    """
+    return CaseInsensitiveDict(
+        {
+            "User-Agent": default_user_agent(),
+            "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
+            "Accept": "*/*",
+            "Connection": "keep-alive",
+        }
+    )
+
+
+def parse_header_links(value):
+    """Return a list of parsed link headers.
+
+    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+    :rtype: list
+    """
+
+    links = []
+
+    replace_chars = " '\""
+
+    value = value.strip(replace_chars)
+    if not value:
+        return links
+
+    for val in re.split(", *<", value):
+        try:
+            url, params = val.split(";", 1)
+        except ValueError:
+            url, params = val, ""
+
+        link = {"url": url.strip("<> '\"")}
+
+        for param in params.split(";"):
+            try:
+                key, value = param.split("=")
+            except ValueError:
+                break
+
+            link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+        links.append(link)
+
+    return links
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+    """
+    :rtype: str
+    """
+    # JSON always starts with two ASCII characters, so detection is as
+    # easy as counting the nulls and from their location and count
+    # determine the encoding. Also detect a BOM, if present.
+    sample = data[:4]
+    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+        return "utf-32"  # BOM included
+    if sample[:3] == codecs.BOM_UTF8:
+        return "utf-8-sig"  # BOM included, MS style (discouraged)
+    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+        return "utf-16"  # BOM included
+    nullcount = sample.count(_null)
+    if nullcount == 0:
+        return "utf-8"
+    if nullcount == 2:
+        if sample[::2] == _null2:  # 1st and 3rd are null
+            return "utf-16-be"
+        if sample[1::2] == _null2:  # 2nd and 4th are null
+            return "utf-16-le"
+        # Did not detect 2 valid UTF-16 ascii-range characters
+    if nullcount == 3:
+        if sample[:3] == _null3:
+            return "utf-32-be"
+        if sample[1:] == _null3:
+            return "utf-32-le"
+        # Did not detect a valid UTF-32 ascii-range character
+    return None
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+    """Given a URL that may or may not have a scheme, prepend the given scheme.
+    Does not replace a present scheme with the one provided as an argument.
+
+    :rtype: str
+    """
+    parsed = parse_url(url)
+    scheme, auth, host, port, path, query, fragment = parsed
+
+    # A defect in urlparse determines that there isn't a netloc present in some
+    # urls. We previously assumed parsing was overly cautious, and swapped the
+    # netloc and path. Due to a lack of tests on the original defect, this is
+    # maintained with parse_url for backwards compatibility.
+    netloc = parsed.netloc
+    if not netloc:
+        netloc, path = path, netloc
+
+    if auth:
+        # parse_url doesn't provide the netloc with auth
+        # so we'll add it ourselves.
+        netloc = "@".join([auth, netloc])
+    if scheme is None:
+        scheme = new_scheme
+    if path is None:
+        path = ""
+
+    return urlunparse((scheme, netloc, path, "", query, fragment))
+
+
+def get_auth_from_url(url):
+    """Given a url with authentication components, extract them into a tuple of
+    username,password.
+ + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ("", "") + + return auth + + +def check_header_validity(header): + """Verifies that header parts don't contain leading whitespace + reserved characters, or return characters. + + :param header: tuple, in the format (name, value). + """ + name, value = header + _validate_header_part(header, name, 0) + _validate_header_part(header, value, 1) + + +def _validate_header_part(header, header_part, header_validator_index): + if isinstance(header_part, str): + validator = _HEADER_VALIDATORS_STR[header_validator_index] + elif isinstance(header_part, bytes): + validator = _HEADER_VALIDATORS_BYTE[header_validator_index] + else: + raise InvalidHeader( + f"Header part ({header_part!r}) from {header} " + f"must be of type str or bytes, not {type(header_part)}" + ) + + if not validator.match(header_part): + header_kind = "name" if header_validator_index == 0 else "value" + raise InvalidHeader( + f"Invalid leading whitespace, reserved character(s), or return " + f"character(s) in header {header_kind}: {header_part!r}" + ) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit("@", 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, "")) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. + """ + body_seek = getattr(prepared_request.body, "seek", None) + if body_seek is not None and isinstance( + prepared_request._body_position, integer_types + ): + try: + body_seek(prepared_request._body_position) + except OSError: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect." 
+ ) + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/METADATA b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..c3ea2525b8c3fe155adbbf683902ece3805d9e79 --- /dev/null +++ b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/METADATA @@ -0,0 +1,133 @@ +Metadata-Version: 2.4 +Name: safetensors +Version: 0.7.0 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Typing :: Typed +Requires-Dist: numpy>=1.21.6 ; extra == 'numpy' +Requires-Dist: packaging ; extra == 'torch' +Requires-Dist: safetensors[numpy] ; extra == 'torch' +Requires-Dist: torch>=1.10 ; extra == 'torch' +Requires-Dist: safetensors[numpy] ; extra == 'tensorflow' +Requires-Dist: tensorflow>=2.11.0 ; extra == 'tensorflow' +Requires-Dist: safetensors[numpy] ; extra == 'pinned-tf' +Requires-Dist: tensorflow==2.18.0 ; extra == 'pinned-tf' +Requires-Dist: safetensors[numpy] ; extra == 'jax' +Requires-Dist: flax>=0.6.3 ; extra == 'jax' +Requires-Dist: jax>=0.3.25 ; extra == 'jax' +Requires-Dist: jaxlib>=0.3.25 ; extra == 'jax' +Requires-Dist: mlx>=0.0.9 ; extra == 'mlx' +Requires-Dist: safetensors[numpy] ; extra == 'paddlepaddle' +Requires-Dist: paddlepaddle>=2.4.1 ; extra == 'paddlepaddle' +Requires-Dist: ruff ; extra == 'quality' +Requires-Dist: safetensors[numpy] ; extra == 'testing' +Requires-Dist: h5py>=3.7.0 ; extra == 'testing' +Requires-Dist: huggingface-hub>=0.12.1 ; extra == 'testing' +Requires-Dist: setuptools-rust>=1.5.2 ; extra == 'testing' +Requires-Dist: pytest>=7.2.0 ; extra == 'testing' +Requires-Dist: pytest-benchmark>=4.0.0 ; extra == 'testing' +Requires-Dist: hypothesis>=6.70.2 ; extra == 'testing' +Requires-Dist: safetensors[numpy] ; extra == 'testingfree' +Requires-Dist: huggingface-hub>=0.12.1 ; extra == 'testingfree' +Requires-Dist: setuptools-rust>=1.5.2 ; extra == 'testingfree' +Requires-Dist: pytest>=7.2.0 ; extra == 'testingfree' +Requires-Dist: pytest-benchmark>=4.0.0 ; extra == 'testingfree' +Requires-Dist: hypothesis>=6.70.2 ; extra == 'testingfree' +Requires-Dist: safetensors[torch] ; extra == 'all' +Requires-Dist: safetensors[numpy] ; extra == 'all' +Requires-Dist: safetensors[pinned-tf] ; extra == 'all' +Requires-Dist: safetensors[jax] ; extra == 'all' +Requires-Dist: safetensors[paddlepaddle] ; extra == 'all' +Requires-Dist: safetensors[quality] ; extra == 'all' +Requires-Dist: safetensors[testing] ; extra == 'all' +Requires-Dist: 
safetensors[all] ; extra == 'dev' +Provides-Extra: numpy +Provides-Extra: torch +Provides-Extra: tensorflow +Provides-Extra: pinned-tf +Provides-Extra: jax +Provides-Extra: mlx +Provides-Extra: paddlepaddle +Provides-Extra: quality +Provides-Extra: testing +Provides-Extra: testingfree +Provides-Extra: all +Provides-Extra: dev +License-File: LICENSE +Author-email: Nicolas Patry +Requires-Python: >=3.9 +Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM +Project-URL: Homepage, https://github.com/huggingface/safetensors +Project-URL: Source, https://github.com/huggingface/safetensors + +## Installation + +``` +pip install safetensors +``` + + +## Usage + +### Numpy + +```python +from safetensors.numpy import save_file, load_file +import numpy as np + +tensors = { + "a": np.zeros((2, 2)), + "b": np.zeros((2, 3), dtype=np.uint8) +} + +save_file(tensors, "./model.safetensors") + + +# Now loading +loaded = load_file("./model.safetensors") +``` + +### Torch + +```python +from safetensors.torch import save_file, load_file +import torch + +tensors = { + "a": torch.zeros((2, 2)), + "b": torch.zeros((2, 3), dtype=torch.uint8) +} + +save_file(tensors, "./model.safetensors") + + +# Now loading +loaded = load_file("./model.safetensors") +``` + +### Developing + +``` +# inside ./safetensors/bindings/python +pip install .[dev] +``` +Should be enough to install this library locally. + +### Testing + +``` +# inside ./safetensors/bindings/python +pip install .[dev] +pytest -sv tests/ +``` + diff --git a/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/RECORD b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..d558df3ba18d14b9ec468b19f9a3b3399753e76b --- /dev/null +++ b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/RECORD @@ -0,0 +1,22 @@ +safetensors-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +safetensors-0.7.0.dist-info/METADATA,sha256=aAkyb78XRE3VIKsZ8KVbehhHbb5SpDBGa79TiZZ8Kqo,4125 +safetensors-0.7.0.dist-info/RECORD,, +safetensors-0.7.0.dist-info/WHEEL,sha256=EsgGQg7OBGIn-zS1ipDRPjO8C2qSQ0GRrd2xuL_Pyq0,143 +safetensors-0.7.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 +safetensors/__init__.py,sha256=wbzKZlAVgnAsjHmqryuSJCiADvpDZxNGCfj8VzY0At0,194 +safetensors/__init__.pyi,sha256=IpwsrzRWJA2yR8TxMEC3RHgM_5TiDgSFqyvrAxAa15U,4019 +safetensors/__pycache__/__init__.cpython-313.pyc,, +safetensors/__pycache__/flax.cpython-313.pyc,, +safetensors/__pycache__/mlx.cpython-313.pyc,, +safetensors/__pycache__/numpy.cpython-313.pyc,, +safetensors/__pycache__/paddle.cpython-313.pyc,, +safetensors/__pycache__/tensorflow.cpython-313.pyc,, +safetensors/__pycache__/torch.cpython-313.pyc,, +safetensors/_safetensors_rust.abi3.so,sha256=h9qPRzPm7PhApdjisd6IjK0BLCN1URiRlWfsSAp9fgI,1216632 +safetensors/flax.py,sha256=T59elUqzVDyGYGdR78QzNEuwyAc8KrEO0EuLBSKOnUs,3853 +safetensors/mlx.py,sha256=IR51jRpcJq6epb0Agj8VsxI9xqBS6NjeAJnr-Ny0jJU,3850 +safetensors/numpy.py,sha256=rit_12-IfZtRgip_VLd8nPAcCXyeM2fPrCDZ7OiyxSY,5028 +safetensors/paddle.py,sha256=LrDwqQbwFnQXiY3M601IU7G6FBctX6tyHHK3_UH6lxE,8721 +safetensors/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +safetensors/tensorflow.py,sha256=AZ-O7-gM-JqTfjczZyCUAHm3Er-GSQnQWaFyY7mAIQc,3903 +safetensors/torch.py,sha256=U0acZVahLsxvqPa1GitMRiaByu6XVbmiBITGtuznBEY,18610 diff --git a/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/WHEEL 
b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..fff65e274d8556cd5c2f78dc3cbfaa7bd6375276 --- /dev/null +++ b/venv/lib/python3.13/site-packages/safetensors-0.7.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: maturin (1.10.2) +Root-Is-Purelib: false +Tag: cp38-abi3-manylinux_2_17_x86_64 +Tag: cp38-abi3-manylinux2014_x86_64 diff --git a/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/METADATA b/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..181b4dc8b2f8697d1c0374a612ffd8b2f2db346a --- /dev/null +++ b/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/METADATA @@ -0,0 +1,1594 @@ +Metadata-Version: 2.1 +Name: tqdm +Version: 4.67.1 +Summary: Fast, Extensible Progress Meter +Maintainer-email: tqdm developers +License: MPL-2.0 AND MIT +Project-URL: homepage, https://tqdm.github.io +Project-URL: repository, https://github.com/tqdm/tqdm +Project-URL: changelog, https://tqdm.github.io/releases +Project-URL: wiki, https://github.com/tqdm/tqdm/wiki +Keywords: progressbar,progressmeter,progress,bar,meter,rate,eta,console,terminal,time +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Environment :: Other Environment +Classifier: Environment :: Win32 (MS Windows) +Classifier: Environment :: X11 Applications +Classifier: Framework :: IPython +Classifier: Framework :: Jupyter +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: End Users/Desktop +Classifier: Intended Audience :: Other Audience +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: MIT License +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Operating System :: MacOS +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft +Classifier: Operating System :: Microsoft :: MS-DOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: BSD :: FreeBSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: POSIX :: SunOS/Solaris +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation +Classifier: Programming Language :: Python :: Implementation :: IronPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Unix 
Shell +Classifier: Topic :: Desktop Environment +Classifier: Topic :: Education :: Computer Aided Instruction (CAI) +Classifier: Topic :: Education :: Testing +Classifier: Topic :: Office/Business +Classifier: Topic :: Other/Nonlisted Topic +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Pre-processors +Classifier: Topic :: Software Development :: User Interfaces +Classifier: Topic :: System :: Installation/Setup +Classifier: Topic :: System :: Logging +Classifier: Topic :: System :: Monitoring +Classifier: Topic :: System :: Shells +Classifier: Topic :: Terminals +Classifier: Topic :: Utilities +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENCE +Requires-Dist: colorama; platform_system == "Windows" +Provides-Extra: dev +Requires-Dist: pytest>=6; extra == "dev" +Requires-Dist: pytest-cov; extra == "dev" +Requires-Dist: pytest-timeout; extra == "dev" +Requires-Dist: pytest-asyncio>=0.24; extra == "dev" +Requires-Dist: nbval; extra == "dev" +Provides-Extra: discord +Requires-Dist: requests; extra == "discord" +Provides-Extra: slack +Requires-Dist: slack-sdk; extra == "slack" +Provides-Extra: telegram +Requires-Dist: requests; extra == "telegram" +Provides-Extra: notebook +Requires-Dist: ipywidgets>=6; extra == "notebook" + +|Logo| + +tqdm +==== + +|Py-Versions| |Versions| |Conda-Forge-Status| |Docker| |Snapcraft| + +|Build-Status| |Coverage-Status| |Branch-Coverage-Status| |Codacy-Grade| |Libraries-Rank| |PyPI-Downloads| + +|LICENCE| |OpenHub-Status| |binder-demo| |awesome-python| + +``tqdm`` derives from the Arabic word *taqaddum* (تقدّم) which can mean "progress," +and is an abbreviation for "I love you so much" in Spanish (*te quiero demasiado*). + +Instantly make your loops show a smart progress meter - just wrap any +iterable with ``tqdm(iterable)``, and you're done! + +.. code:: python + + from tqdm import tqdm + for i in tqdm(range(10000)): + ... + +``76%|████████████████████████        | 7568/10000 [00:33<00:10, 229.00it/s]`` + +``trange(N)`` can be also used as a convenient shortcut for +``tqdm(range(N))``. + +|Screenshot| + |Video| |Slides| |Merch| + +It can also be executed as a module with pipes: + +.. code:: sh + + $ seq 9999999 | tqdm --bytes | wc -l + 75.2MB [00:00, 217MB/s] + 9999999 + + $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \ + > backup.tgz + 32%|██████████▍ | 8.89G/27.9G [00:42<01:31, 223MB/s] + +Overhead is low -- about 60ns per iteration (80ns with ``tqdm.gui``), and is +unit tested against performance regression. +By comparison, the well-established +`ProgressBar `__ has +an 800ns/iter overhead. + +In addition to its low overhead, ``tqdm`` uses smart algorithms to predict +the remaining time and to skip unnecessary iteration displays, which allows +for a negligible overhead in most cases. + +``tqdm`` works on any platform +(Linux, Windows, Mac, FreeBSD, NetBSD, Solaris/SunOS), +in any console or in a GUI, and is also friendly with IPython/Jupyter notebooks. + +``tqdm`` does not require any dependencies (not even ``curses``!), just +Python and an environment supporting ``carriage return \r`` and +``line feed \n`` control characters. + +------------------------------------------ + +.. 
contents:: Table of contents
+   :backlinks: top
+   :local:
+
+
+Installation
+------------
+
+Latest PyPI stable release
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|Versions| |PyPI-Downloads| |Libraries-Dependents|
+
+.. code:: sh
+
+    pip install tqdm
+
+Latest development release on GitHub
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|GitHub-Status| |GitHub-Stars| |GitHub-Commits| |GitHub-Forks| |GitHub-Updated|
+
+Pull and install pre-release ``devel`` branch:
+
+.. code:: sh
+
+    pip install "git+https://github.com/tqdm/tqdm.git@devel#egg=tqdm"
+
+Latest Conda release
+~~~~~~~~~~~~~~~~~~~~
+
+|Conda-Forge-Status|
+
+.. code:: sh
+
+    conda install -c conda-forge tqdm
+
+Latest Snapcraft release
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+|Snapcraft|
+
+There are 3 channels to choose from:
+
+.. code:: sh
+
+    snap install tqdm              # implies --stable, i.e. latest tagged release
+    snap install tqdm --candidate  # master branch
+    snap install tqdm --edge       # devel branch
+
+Note that ``snap`` binaries are purely for CLI use (not ``import``-able), and
+automatically set up ``bash`` tab-completion.
+
+Latest Docker release
+~~~~~~~~~~~~~~~~~~~~~
+
+|Docker|
+
+.. code:: sh
+
+    docker pull tqdm/tqdm
+    docker run -i --rm tqdm/tqdm --help
+
+Other
+~~~~~
+
+There are other (unofficial) places where ``tqdm`` may be downloaded, particularly for CLI use:
+
+|Repology|
+
+.. |Repology| image:: https://repology.org/badge/tiny-repos/python:tqdm.svg
+   :target: https://repology.org/project/python:tqdm/versions
+
+Changelog
+---------
+
+The list of all changes is available either on GitHub's Releases:
+|GitHub-Status|, on the
+`wiki <https://github.com/tqdm/tqdm/wiki>`__, or on the
+`website <https://tqdm.github.io/releases>`__.
+
+
+Usage
+-----
+
+``tqdm`` is very versatile and can be used in a number of ways.
+The three main ones are given below.
+
+Iterable-based
+~~~~~~~~~~~~~~
+
+Wrap ``tqdm()`` around any iterable:
+
+.. code:: python
+
+    from tqdm import tqdm
+    from time import sleep
+
+    text = ""
+    for char in tqdm(["a", "b", "c", "d"]):
+        sleep(0.25)
+        text = text + char
+
+``trange(i)`` is a special optimised instance of ``tqdm(range(i))``:
+
+.. code:: python
+
+    from tqdm import trange
+
+    for i in trange(100):
+        sleep(0.01)
+
+Instantiation outside of the loop allows for manual control over ``tqdm()``:
+
+.. code:: python
+
+    pbar = tqdm(["a", "b", "c", "d"])
+    for char in pbar:
+        sleep(0.25)
+        pbar.set_description("Processing %s" % char)
+
+Manual
+~~~~~~
+
+Manual control of ``tqdm()`` updates using a ``with`` statement:
+
+.. code:: python
+
+    with tqdm(total=100) as pbar:
+        for i in range(10):
+            sleep(0.1)
+            pbar.update(10)
+
+If the optional variable ``total`` (or an iterable with ``len()``) is
+provided, predictive stats are displayed.
+
+``with`` is also optional (you can just assign ``tqdm()`` to a variable,
+but in this case don't forget to ``del`` or ``close()`` at the end):
+
+.. code:: python
+
+    pbar = tqdm(total=100)
+    for i in range(10):
+        sleep(0.1)
+        pbar.update(10)
+    pbar.close()
+
+Module
+~~~~~~
+
+Perhaps the most wonderful use of ``tqdm`` is in a script or on the command
+line. Simply inserting ``tqdm`` (or ``python -m tqdm``) between pipes will pass
+through all ``stdin`` to ``stdout`` while printing progress to ``stderr``.
+
+The example below demonstrates counting the number of lines in all Python files
+in the current directory, with timing information included.
+
+.. code:: sh
+
+    $ time find . -name '*.py' -type f -exec cat \{} \; | wc -l
+    857365
+
+    real    0m3.458s
+    user    0m0.274s
+    sys     0m3.325s
+
+    $ time find .
-name '*.py' -type f -exec cat \{} \; | tqdm | wc -l + 857366it [00:03, 246471.31it/s] + 857365 + + real 0m3.585s + user 0m0.862s + sys 0m3.358s + +Note that the usual arguments for ``tqdm`` can also be specified. + +.. code:: sh + + $ find . -name '*.py' -type f -exec cat \{} \; | + tqdm --unit loc --unit_scale --total 857366 >> /dev/null + 100%|█████████████████████████████████| 857K/857K [00:04<00:00, 246Kloc/s] + +Backing up a large directory? + +.. code:: sh + + $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \ + > backup.tgz + 44%|██████████████▊ | 153M/352M [00:14<00:18, 11.0MB/s] + +This can be beautified further: + +.. code:: sh + + $ BYTES=$(du -sb docs/ | cut -f1) + $ tar -cf - docs/ \ + | tqdm --bytes --total "$BYTES" --desc Processing | gzip \ + | tqdm --bytes --total "$BYTES" --desc Compressed --position 1 \ + > ~/backup.tgz + Processing: 100%|██████████████████████| 352M/352M [00:14<00:00, 30.2MB/s] + Compressed: 42%|█████████▎ | 148M/352M [00:14<00:19, 10.9MB/s] + +Or done on a file level using 7-zip: + +.. code:: sh + + $ 7z a -bd -r backup.7z docs/ | grep Compressing \ + | tqdm --total $(find docs/ -type f | wc -l) --unit files \ + | grep -v Compressing + 100%|██████████████████████████▉| 15327/15327 [01:00<00:00, 712.96files/s] + +Pre-existing CLI programs already outputting basic progress information will +benefit from ``tqdm``'s ``--update`` and ``--update_to`` flags: + +.. code:: sh + + $ seq 3 0.1 5 | tqdm --total 5 --update_to --null + 100%|████████████████████████████████████| 5.0/5 [00:00<00:00, 9673.21it/s] + $ seq 10 | tqdm --update --null # 1 + 2 + ... + 10 = 55 iterations + 55it [00:00, 90006.52it/s] + +FAQ and Known Issues +-------------------- + +|GitHub-Issues| + +The most common issues relate to excessive output on multiple lines, instead +of a neat one-line progress bar. + +- Consoles in general: require support for carriage return (``CR``, ``\r``). + + * Some cloud logging consoles which don't support ``\r`` properly + (`cloudwatch `__, + `K8s `__) may benefit from + ``export TQDM_POSITION=-1``. + +- Nested progress bars: + + * Consoles in general: require support for moving cursors up to the + previous line. For example, + `IDLE `__, + `ConEmu `__ and + `PyCharm `__ (also + `here `__, + `here `__, and + `here `__) + lack full support. + * Windows: additionally may require the Python module ``colorama`` + to ensure nested bars stay within their respective lines. + +- Unicode: + + * Environments which report that they support unicode will have solid smooth + progressbars. The fallback is an ``ascii``-only bar. + * Windows consoles often only partially support unicode and thus + `often require explicit ascii=True `__ + (also `here `__). This is due to + either normal-width unicode characters being incorrectly displayed as + "wide", or some unicode characters not rendering. + +- Wrapping generators: + + * Generator wrapper functions tend to hide the length of iterables. + ``tqdm`` does not. + * Replace ``tqdm(enumerate(...))`` with ``enumerate(tqdm(...))`` or + ``tqdm(enumerate(x), total=len(x), ...)``. + The same applies to ``numpy.ndenumerate``. + * Replace ``tqdm(zip(a, b))`` with ``zip(tqdm(a), b)`` or even + ``zip(tqdm(a), tqdm(b))``. + * The same applies to ``itertools``. + * Some useful convenience functions can be found under ``tqdm.contrib``. + +- `No intermediate output in docker-compose `__: + use ``docker-compose run`` instead of ``docker-compose up`` and ``tty: true``. 
+ +- Overriding defaults via environment variables: + e.g. in CI/cloud jobs, ``export TQDM_MININTERVAL=5`` to avoid log spam. + This override logic is handled by the ``tqdm.utils.envwrap`` decorator + (useful independent of ``tqdm``). + +If you come across any other difficulties, browse and file |GitHub-Issues|. + +Documentation +------------- + +|Py-Versions| |README-Hits| (Since 19 May 2016) + +.. code:: python + + class tqdm(): + """ + Decorate an iterable object, returning an iterator which acts exactly + like the original iterable, but prints a dynamically updating + progressbar every time a value is requested. + """ + + @envwrap("TQDM_") # override defaults via env vars + def __init__(self, iterable=None, desc=None, total=None, leave=True, + file=None, ncols=None, mininterval=0.1, + maxinterval=10.0, miniters=None, ascii=None, disable=False, + unit='it', unit_scale=False, dynamic_ncols=False, + smoothing=0.3, bar_format=None, initial=0, position=None, + postfix=None, unit_divisor=1000, write_bytes=False, + lock_args=None, nrows=None, colour=None, delay=0): + +Parameters +~~~~~~~~~~ + +* iterable : iterable, optional + Iterable to decorate with a progressbar. + Leave blank to manually manage the updates. +* desc : str, optional + Prefix for the progressbar. +* total : int or float, optional + The number of expected iterations. If unspecified, + len(iterable) is used if possible. If float("inf") or as a last + resort, only basic progress statistics are displayed + (no ETA, no progressbar). + If ``gui`` is True and this parameter needs subsequent updating, + specify an initial arbitrary large positive number, + e.g. 9e9. +* leave : bool, optional + If [default: True], keeps all traces of the progressbar + upon termination of iteration. + If ``None``, will leave only if ``position`` is ``0``. +* file : ``io.TextIOWrapper`` or ``io.StringIO``, optional + Specifies where to output the progress messages + (default: sys.stderr). Uses ``file.write(str)`` and ``file.flush()`` + methods. For encoding, see ``write_bytes``. +* ncols : int, optional + The width of the entire output message. If specified, + dynamically resizes the progressbar to stay within this bound. + If unspecified, attempts to use environment width. The + fallback is a meter width of 10 and no limit for the counter and + statistics. If 0, will not print any meter (only stats). +* mininterval : float, optional + Minimum progress display update interval [default: 0.1] seconds. +* maxinterval : float, optional + Maximum progress display update interval [default: 10] seconds. + Automatically adjusts ``miniters`` to correspond to ``mininterval`` + after long display update lag. Only works if ``dynamic_miniters`` + or monitor thread is enabled. +* miniters : int or float, optional + Minimum progress display update interval, in iterations. + If 0 and ``dynamic_miniters``, will automatically adjust to equal + ``mininterval`` (more CPU efficient, good for tight loops). + If > 0, will skip display of specified number of iterations. + Tweak this and ``mininterval`` to get very efficient loops. + If your progress is erratic with both fast and slow iterations + (network, skipping items, etc) you should set miniters=1. +* ascii : bool or str, optional + If unspecified or False, use unicode (smooth blocks) to fill + the meter. The fallback is to use ASCII characters " 123456789#". +* disable : bool, optional + Whether to disable the entire progressbar wrapper + [default: False]. If set to None, disable on non-TTY. 
+* unit : str, optional + String that will be used to define the unit of each iteration + [default: it]. +* unit_scale : bool or int or float, optional + If 1 or True, the number of iterations will be reduced/scaled + automatically and a metric prefix following the + International System of Units standard will be added + (kilo, mega, etc.) [default: False]. If any other non-zero + number, will scale ``total`` and ``n``. +* dynamic_ncols : bool, optional + If set, constantly alters ``ncols`` and ``nrows`` to the + environment (allowing for window resizes) [default: False]. +* smoothing : float, optional + Exponential moving average smoothing factor for speed estimates + (ignored in GUI mode). Ranges from 0 (average speed) to 1 + (current/instantaneous speed) [default: 0.3]. +* bar_format : str, optional + Specify a custom bar string formatting. May impact performance. + [default: '{l_bar}{bar}{r_bar}'], where + l_bar='{desc}: {percentage:3.0f}%|' and + r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' + '{rate_fmt}{postfix}]' + Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, + percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, + rate, rate_fmt, rate_noinv, rate_noinv_fmt, + rate_inv, rate_inv_fmt, postfix, unit_divisor, + remaining, remaining_s, eta. + Note that a trailing ": " is automatically removed after {desc} + if the latter is empty. +* initial : int or float, optional + The initial counter value. Useful when restarting a progress + bar [default: 0]. If using float, consider specifying ``{n:.3f}`` + or similar in ``bar_format``, or specifying ``unit_scale``. +* position : int, optional + Specify the line offset to print this bar (starting from 0) + Automatic if unspecified. + Useful to manage multiple bars at once (eg, from threads). +* postfix : dict or ``*``, optional + Specify additional stats to display at the end of the bar. + Calls ``set_postfix(**postfix)`` if possible (dict). +* unit_divisor : float, optional + [default: 1000], ignored unless ``unit_scale`` is True. +* write_bytes : bool, optional + Whether to write bytes. If (default: False) will write unicode. +* lock_args : tuple, optional + Passed to ``refresh`` for intermediate output + (initialisation, iterating, and updating). +* nrows : int, optional + The screen height. If specified, hides nested bars outside this + bound. If unspecified, attempts to use environment height. + The fallback is 20. +* colour : str, optional + Bar colour (e.g. 'green', '#00ff00'). +* delay : float, optional + Don't display until [default: 0] seconds have elapsed. + +Extra CLI Options +~~~~~~~~~~~~~~~~~ + +* delim : chr, optional + Delimiting character [default: '\n']. Use '\0' for null. + N.B.: on Windows systems, Python converts '\n' to '\r\n'. +* buf_size : int, optional + String buffer size in bytes [default: 256] + used when ``delim`` is specified. +* bytes : bool, optional + If true, will count bytes, ignore ``delim``, and default + ``unit_scale`` to True, ``unit_divisor`` to 1024, and ``unit`` to 'B'. +* tee : bool, optional + If true, passes ``stdin`` to both ``stderr`` and ``stdout``. +* update : bool, optional + If true, will treat input as newly elapsed iterations, + i.e. numbers to pass to ``update()``. Note that this is slow + (~2e5 it/s) since every input must be decoded as a number. +* update_to : bool, optional + If true, will treat input as total elapsed iterations, + i.e. numbers to assign to ``self.n``. Note that this is slow + (~2e5 it/s) since every input must be decoded as a number. 
+* null : bool, optional
+    If true, will discard input (no stdout).
+* manpath : str, optional
+    Directory in which to install tqdm man pages.
+* comppath : str, optional
+    Directory in which to place tqdm completion.
+* log : str, optional
+    CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET.
+
+Returns
+~~~~~~~
+
+* out : decorated iterator.
+
+.. code:: python
+
+    class tqdm():
+      def update(self, n=1):
+          """
+          Manually update the progress bar, useful for streams
+          such as reading files.
+          E.g.:
+          >>> t = tqdm(total=filesize) # Initialise
+          >>> for current_buffer in stream:
+          ...    ...
+          ...    t.update(len(current_buffer))
+          >>> t.close()
+          The last line is highly recommended, but possibly not necessary if
+          ``t.update()`` will be called in such a way that ``filesize`` will be
+          exactly reached and printed.
+
+          Parameters
+          ----------
+          n : int or float, optional
+              Increment to add to the internal counter of iterations
+              [default: 1]. If using float, consider specifying ``{n:.3f}``
+              or similar in ``bar_format``, or specifying ``unit_scale``.
+
+          Returns
+          -------
+          out : bool or None
+              True if a ``display()`` was triggered.
+          """
+
+      def close(self):
+          """Cleanup and (if leave=False) close the progressbar."""
+
+      def clear(self, nomove=False):
+          """Clear current bar display."""
+
+      def refresh(self, nolock=False, lock_args=None):
+          """
+          Force refresh the display of this bar.
+
+          Parameters
+          ----------
+          nolock : bool, optional
+              If ``True``, does not lock.
+              If [default: ``False``]: calls ``acquire()`` on internal lock.
+          lock_args : tuple, optional
+              Passed to internal lock's ``acquire()``.
+              If specified, will only ``display()`` if ``acquire()`` returns ``True``.
+          """
+
+      def unpause(self):
+          """Restart tqdm timer from last print time."""
+
+      def reset(self, total=None):
+          """
+          Resets to 0 iterations for repeated use.
+
+          Consider combining with ``leave=True``.
+
+          Parameters
+          ----------
+          total : int or float, optional. Total to use for the new bar.
+          """
+
+      def set_description(self, desc=None, refresh=True):
+          """
+          Set/modify description of the progress bar.
+
+          Parameters
+          ----------
+          desc : str, optional
+          refresh : bool, optional
+              Forces refresh [default: True].
+          """
+
+      def set_postfix(self, ordered_dict=None, refresh=True, **tqdm_kwargs):
+          """
+          Set/modify postfix (additional stats)
+          with automatic formatting based on datatype.
+
+          Parameters
+          ----------
+          ordered_dict : dict or OrderedDict, optional
+          refresh : bool, optional
+              Forces refresh [default: True].
+          kwargs : dict, optional
+          """
+
+      @classmethod
+      def write(cls, s, file=sys.stdout, end="\n"):
+          """Print a message via tqdm (without overlap with bars)."""
+
+      @property
+      def format_dict(self):
+          """Public API for read-only member access."""
+
+      def display(self, msg=None, pos=None):
+          """
+          Use ``self.sp`` to display ``msg`` in the specified ``pos``.
+
+          Consider overloading this function when inheriting to use e.g.:
+          ``self.some_frontend(**self.format_dict)`` instead of ``self.sp``.
+
+          Parameters
+          ----------
+          msg : str, optional. What to display (default: ``repr(self)``).
+          pos : int, optional. Position to ``moveto``
+              (default: ``abs(self.pos)``).
+          """
+
+      @classmethod
+      @contextmanager
+      def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs):
+          """
+          stream : file-like object.
+          method : str, "read" or "write". The result of ``read()`` and
+              the first argument of ``write()`` should have a ``len()``.
+
+          >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj:
+          ...     while True:
+          ...         chunk = fobj.read(chunk_size)
+          ...         if not chunk:
+          ...             break
+          """
+
+      @classmethod
+      def pandas(cls, *targs, **tqdm_kwargs):
+          """Registers the current `tqdm` class with `pandas`."""
+
+    def trange(*args, **tqdm_kwargs):
+        """Shortcut for `tqdm(range(*args), **tqdm_kwargs)`."""
+
+Convenience Functions
+~~~~~~~~~~~~~~~~~~~~~
+
+.. code:: python
+
+    def tqdm.contrib.tenumerate(iterable, start=0, total=None,
+                                tqdm_class=tqdm.auto.tqdm, **tqdm_kwargs):
+        """Equivalent of `numpy.ndenumerate` or builtin `enumerate`."""
+
+    def tqdm.contrib.tzip(iter1, *iter2plus, **tqdm_kwargs):
+        """Equivalent of builtin `zip`."""
+
+    def tqdm.contrib.tmap(function, *sequences, **tqdm_kwargs):
+        """Equivalent of builtin `map`."""
+
+Submodules
+~~~~~~~~~~
+
+.. code:: python
+
+    class tqdm.notebook.tqdm(tqdm.tqdm):
+        """IPython/Jupyter Notebook widget."""
+
+    class tqdm.auto.tqdm(tqdm.tqdm):
+        """Automatically chooses between `tqdm.notebook` and `tqdm.tqdm`."""
+
+    class tqdm.asyncio.tqdm(tqdm.tqdm):
+        """Asynchronous version."""
+        @classmethod
+        def as_completed(cls, fs, *, loop=None, timeout=None, total=None,
+                         **tqdm_kwargs):
+            """Wrapper for `asyncio.as_completed`."""
+
+    class tqdm.gui.tqdm(tqdm.tqdm):
+        """Matplotlib GUI version."""
+
+    class tqdm.tk.tqdm(tqdm.tqdm):
+        """Tkinter GUI version."""
+
+    class tqdm.rich.tqdm(tqdm.tqdm):
+        """`rich.progress` version."""
+
+    class tqdm.keras.TqdmCallback(keras.callbacks.Callback):
+        """Keras callback for epoch and batch progress."""
+
+    class tqdm.dask.TqdmCallback(dask.callbacks.Callback):
+        """Dask callback for task progress."""
+
+
+``contrib``
++++++++++++
+
+The ``tqdm.contrib`` package also contains experimental modules:
+
+- ``tqdm.contrib.itertools``: Thin wrappers around ``itertools``
+- ``tqdm.contrib.concurrent``: Thin wrappers around ``concurrent.futures``
+- ``tqdm.contrib.slack``: Posts to `Slack <https://slack.com>`__ bots
+- ``tqdm.contrib.discord``: Posts to `Discord <https://discord.com>`__ bots
+- ``tqdm.contrib.telegram``: Posts to `Telegram <https://telegram.org>`__ bots
+- ``tqdm.contrib.bells``: Automagically enables all optional features
+
+  * ``auto``, ``pandas``, ``slack``, ``discord``, ``telegram``
+
+Examples and Advanced Usage
+---------------------------
+
+- See the `examples <https://github.com/tqdm/tqdm/tree/master/examples>`__
+  folder;
+- import the module and run ``help()``;
+- consult the `wiki <https://github.com/tqdm/tqdm/wiki>`__;
+
+  * this has an
+    `excellent article <https://github.com/tqdm/tqdm/wiki/How-to-make-a-great-Progress-Bar>`__
+    on how to make a **great** progressbar;
+
+- check out the `slides from PyData London <https://tqdm.github.io/PyData2019/slides.html>`__, or
+- run the |binder-demo|.
+
+Description and additional stats
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Custom information can be displayed and updated dynamically on ``tqdm`` bars
+with the ``desc`` and ``postfix`` arguments:
+
+.. code:: python
+
+    from tqdm import tqdm, trange
+    from random import random, randint
+    from time import sleep
+
+    with trange(10) as t:
+        for i in t:
+            # Description will be displayed on the left
+            t.set_description('GEN %i' % i)
+            # Postfix will be displayed on the right,
+            # formatted automatically based on argument's datatype
+            t.set_postfix(loss=random(), gen=randint(1,999), str='h',
+                          lst=[1, 2])
+            sleep(0.1)
+
+    with tqdm(total=10, bar_format="{postfix[0]} {postfix[1][value]:>8.2g}",
+              postfix=["Batch", {"value": 0}]) as t:
+        for i in range(10):
+            sleep(0.1)
+            t.postfix[1]["value"] = i / 2
+            t.update()
+
+Points to remember when using ``{postfix[...]}`` in the ``bar_format`` string:
+
+- ``postfix`` also needs to be passed as an initial argument in a compatible
+  format, and
+- ``postfix`` will be auto-converted to a string if it is a ``dict``-like
+  object.
To prevent this behaviour, insert an extra item into the dictionary + where the key is not a string. + +Additional ``bar_format`` parameters may also be defined by overriding +``format_dict``, and the bar itself may be modified using ``ascii``: + +.. code:: python + + from tqdm import tqdm + class TqdmExtraFormat(tqdm): + """Provides a `total_time` format parameter""" + @property + def format_dict(self): + d = super().format_dict + total_time = d["elapsed"] * (d["total"] or 0) / max(d["n"], 1) + d.update(total_time=self.format_interval(total_time) + " in total") + return d + + for i in TqdmExtraFormat( + range(9), ascii=" .oO0", + bar_format="{total_time}: {percentage:.0f}%|{bar}{r_bar}"): + if i == 4: + break + +.. code:: + + 00:00 in total: 44%|0000. | 4/9 [00:00<00:00, 962.93it/s] + +Note that ``{bar}`` also supports a format specifier ``[width][type]``. + +- ``width`` + + * unspecified (default): automatic to fill ``ncols`` + * ``int >= 0``: fixed width overriding ``ncols`` logic + * ``int < 0``: subtract from the automatic default + +- ``type`` + + * ``a``: ascii (``ascii=True`` override) + * ``u``: unicode (``ascii=False`` override) + * ``b``: blank (``ascii=" "`` override) + +This means a fixed bar with right-justified text may be created by using: +``bar_format="{l_bar}{bar:10}|{bar:-10b}right-justified"`` + +Nested progress bars +~~~~~~~~~~~~~~~~~~~~ + +``tqdm`` supports nested progress bars. Here's an example: + +.. code:: python + + from tqdm.auto import trange + from time import sleep + + for i in trange(4, desc='1st loop'): + for j in trange(5, desc='2nd loop'): + for k in trange(50, desc='3rd loop', leave=False): + sleep(0.01) + +For manual control over positioning (e.g. for multi-processing use), +you may specify ``position=n`` where ``n=0`` for the outermost bar, +``n=1`` for the next, and so on. +However, it's best to check if ``tqdm`` can work without manual ``position`` +first. + +.. code:: python + + from time import sleep + from tqdm import trange, tqdm + from multiprocessing import Pool, RLock, freeze_support + + L = list(range(9)) + + def progresser(n): + interval = 0.001 / (n + 2) + total = 5000 + text = f"#{n}, est. {interval * total:<04.2}s" + for _ in trange(total, desc=text, position=n): + sleep(interval) + + if __name__ == '__main__': + freeze_support() # for Windows support + tqdm.set_lock(RLock()) # for managing output contention + p = Pool(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),)) + p.map(progresser, L) + +Note that in Python 3, ``tqdm.write`` is thread-safe: + +.. code:: python + + from time import sleep + from tqdm import tqdm, trange + from concurrent.futures import ThreadPoolExecutor + + L = list(range(9)) + + def progresser(n): + interval = 0.001 / (n + 2) + total = 5000 + text = f"#{n}, est. {interval * total:<04.2}s" + for _ in trange(total, desc=text): + sleep(interval) + if n == 6: + tqdm.write("n == 6 completed.") + tqdm.write("`tqdm.write()` is thread-safe in py3!") + + if __name__ == '__main__': + with ThreadPoolExecutor() as p: + p.map(progresser, L) + +Hooks and callbacks +~~~~~~~~~~~~~~~~~~~ + +``tqdm`` can easily support callbacks/hooks and manual updates. +Here's an example with ``urllib``: + +**``urllib.urlretrieve`` documentation** + + | [...] + | If present, the hook function will be called once + | on establishment of the network connection and once after each block read + | thereafter. The hook will be passed three arguments; a count of blocks + | transferred so far, a block size in bytes, and the total size of the file. 
+ | [...]
+
+.. code:: python
+
+    import urllib, os
+    from tqdm import tqdm
+    urllib = getattr(urllib, 'request', urllib)
+
+    class TqdmUpTo(tqdm):
+        """Provides `update_to(n)` which uses `tqdm.update(delta_n)`."""
+        def update_to(self, b=1, bsize=1, tsize=None):
+            """
+            b : int, optional
+                Number of blocks transferred so far [default: 1].
+            bsize : int, optional
+                Size of each block (in tqdm units) [default: 1].
+            tsize : int, optional
+                Total size (in tqdm units). If [default: None] remains unchanged.
+            """
+            if tsize is not None:
+                self.total = tsize
+            return self.update(b * bsize - self.n)  # also sets self.n = b * bsize
+
+    eg_link = "https://caspersci.uk.to/matryoshka.zip"
+    with TqdmUpTo(unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
+                  desc=eg_link.split('/')[-1]) as t:  # all optional kwargs
+        urllib.urlretrieve(eg_link, filename=os.devnull,
+                           reporthook=t.update_to, data=None)
+        t.total = t.n
+
+Inspired by `twine#242 <https://github.com/pypa/twine/pull/242>`__.
+Functional alternative in
+`examples/tqdm_wget.py <https://github.com/tqdm/tqdm/blob/master/examples/tqdm_wget.py>`__.
+
+It is recommended to use ``miniters=1`` whenever there are potentially
+large differences in iteration speed (e.g. downloading a file over
+a patchy connection).
+
+**Wrapping read/write methods**
+
+To measure throughput through a file-like object's ``read`` or ``write``
+methods, use ``CallbackIOWrapper``:
+
+.. code:: python
+
+    from tqdm.auto import tqdm
+    from tqdm.utils import CallbackIOWrapper
+
+    with tqdm(total=file_obj.size,
+              unit='B', unit_scale=True, unit_divisor=1024) as t:
+        fobj = CallbackIOWrapper(t.update, file_obj, "read")
+        while True:
+            chunk = fobj.read(chunk_size)
+            if not chunk:
+                break
+        t.reset()
+        # ... continue to use `t` for something else
+
+Alternatively, use the even simpler ``wrapattr`` convenience function,
+which would condense both the ``urllib`` and ``CallbackIOWrapper`` examples
+down to:
+
+.. code:: python
+
+    import urllib, os
+    from tqdm import tqdm
+
+    eg_link = "https://caspersci.uk.to/matryoshka.zip"
+    response = getattr(urllib, 'request', urllib).urlopen(eg_link)
+    with tqdm.wrapattr(open(os.devnull, "wb"), "write",
+                       miniters=1, desc=eg_link.split('/')[-1],
+                       total=getattr(response, 'length', None)) as fout:
+        for chunk in response:
+            fout.write(chunk)
+
+The ``requests`` equivalent is nearly identical:
+
+.. code:: python
+
+    import requests, os
+    from tqdm import tqdm
+
+    eg_link = "https://caspersci.uk.to/matryoshka.zip"
+    response = requests.get(eg_link, stream=True)
+    with tqdm.wrapattr(open(os.devnull, "wb"), "write",
+                       miniters=1, desc=eg_link.split('/')[-1],
+                       total=int(response.headers.get('content-length', 0))) as fout:
+        for chunk in response.iter_content(chunk_size=4096):
+            fout.write(chunk)
+
+**Custom callback**
+
+``tqdm`` is known for intelligently skipping unnecessary displays. To make a
+custom callback take advantage of this, simply use the return value of
+``update()``. This is set to ``True`` if a ``display()`` was triggered.
+
+.. code:: python
+
+    from tqdm.auto import tqdm as std_tqdm
+
+    def external_callback(*args, **kwargs):
+        ...
+
+    class TqdmExt(std_tqdm):
+        def update(self, n=1):
+            displayed = super().update(n)
+            if displayed:
+                external_callback(**self.format_dict)
+            return displayed
+
+``asyncio``
+~~~~~~~~~~~
+
+Note that ``break`` isn't currently caught by asynchronous iterators.
+This means that ``tqdm`` cannot clean up after itself in this case:
+
+..
code:: python + + from tqdm.asyncio import tqdm + + async for i in tqdm(range(9)): + if i == 2: + break + +Instead, either call ``pbar.close()`` manually or use the context manager syntax: + +.. code:: python + + from tqdm.asyncio import tqdm + + with tqdm(range(9)) as pbar: + async for i in pbar: + if i == 2: + break + +Pandas Integration +~~~~~~~~~~~~~~~~~~ + +Due to popular demand we've added support for ``pandas`` -- here's an example +for ``DataFrame.progress_apply`` and ``DataFrameGroupBy.progress_apply``: + +.. code:: python + + import pandas as pd + import numpy as np + from tqdm import tqdm + + df = pd.DataFrame(np.random.randint(0, 100, (100000, 6))) + + # Register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm` + # (can use `tqdm.gui.tqdm`, `tqdm.notebook.tqdm`, optional kwargs, etc.) + tqdm.pandas(desc="my bar!") + + # Now you can use `progress_apply` instead of `apply` + # and `progress_map` instead of `map` + df.progress_apply(lambda x: x**2) + # can also groupby: + # df.groupby(0).progress_apply(lambda x: x**2) + +In case you're interested in how this works (and how to modify it for your +own callbacks), see the +`examples `__ +folder or import the module and run ``help()``. + +Keras Integration +~~~~~~~~~~~~~~~~~ + +A ``keras`` callback is also available: + +.. code:: python + + from tqdm.keras import TqdmCallback + + ... + + model.fit(..., verbose=0, callbacks=[TqdmCallback()]) + +Dask Integration +~~~~~~~~~~~~~~~~ + +A ``dask`` callback is also available: + +.. code:: python + + from tqdm.dask import TqdmCallback + + with TqdmCallback(desc="compute"): + ... + arr.compute() + + # or use callback globally + cb = TqdmCallback(desc="global") + cb.register() + arr.compute() + +IPython/Jupyter Integration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +IPython/Jupyter is supported via the ``tqdm.notebook`` submodule: + +.. code:: python + + from tqdm.notebook import trange, tqdm + from time import sleep + + for i in trange(3, desc='1st loop'): + for j in tqdm(range(100), desc='2nd loop'): + sleep(0.01) + +In addition to ``tqdm`` features, the submodule provides a native Jupyter +widget (compatible with IPython v1-v4 and Jupyter), fully working nested bars +and colour hints (blue: normal, green: completed, red: error/interrupt, +light blue: no ETA); as demonstrated below. + +|Screenshot-Jupyter1| +|Screenshot-Jupyter2| +|Screenshot-Jupyter3| + +The ``notebook`` version supports percentage or pixels for overall width +(e.g.: ``ncols='100%'`` or ``ncols='480px'``). + +It is also possible to let ``tqdm`` automatically choose between +console or notebook versions by using the ``autonotebook`` submodule: + +.. code:: python + + from tqdm.autonotebook import tqdm + tqdm.pandas() + +Note that this will issue a ``TqdmExperimentalWarning`` if run in a notebook +since it is not meant to be possible to distinguish between ``jupyter notebook`` +and ``jupyter console``. Use ``auto`` instead of ``autonotebook`` to suppress +this warning. + +Note that notebooks will display the bar in the cell where it was created. +This may be a different cell from the one where it is used. +If this is not desired, either + +- delay the creation of the bar to the cell where it must be displayed, or +- create the bar with ``display=False``, and in a later cell call + ``display(bar.container)``: + +.. code:: python + + from tqdm.notebook import tqdm + pbar = tqdm(..., display=False) + +.. 
code:: python + + # different cell + display(pbar.container) + +The ``keras`` callback has a ``display()`` method which can be used likewise: + +.. code:: python + + from tqdm.keras import TqdmCallback + cbk = TqdmCallback(display=False) + +.. code:: python + + # different cell + cbk.display() + model.fit(..., verbose=0, callbacks=[cbk]) + +Another possibility is to have a single bar (near the top of the notebook) +which is constantly re-used (using ``reset()`` rather than ``close()``). +For this reason, the notebook version (unlike the CLI version) does not +automatically call ``close()`` upon ``Exception``. + +.. code:: python + + from tqdm.notebook import tqdm + pbar = tqdm() + +.. code:: python + + # different cell + iterable = range(100) + pbar.reset(total=len(iterable)) # initialise with new `total` + for i in iterable: + pbar.update() + pbar.refresh() # force print final status but don't `close()` + +Custom Integration +~~~~~~~~~~~~~~~~~~ + +To change the default arguments (such as making ``dynamic_ncols=True``), +simply use built-in Python magic: + +.. code:: python + + from functools import partial + from tqdm import tqdm as std_tqdm + tqdm = partial(std_tqdm, dynamic_ncols=True) + +For further customisation, +``tqdm`` may be inherited from to create custom callbacks (as with the +``TqdmUpTo`` example `above <#hooks-and-callbacks>`__) or for custom frontends +(e.g. GUIs such as notebook or plotting packages). In the latter case: + +1. ``def __init__()`` to call ``super().__init__(..., gui=True)`` to disable + terminal ``status_printer`` creation. +2. Redefine: ``close()``, ``clear()``, ``display()``. + +Consider overloading ``display()`` to use e.g. +``self.frontend(**self.format_dict)`` instead of ``self.sp(repr(self))``. + +Some submodule examples of inheritance: + +- `tqdm/notebook.py `__ +- `tqdm/gui.py `__ +- `tqdm/tk.py `__ +- `tqdm/contrib/slack.py `__ +- `tqdm/contrib/discord.py `__ +- `tqdm/contrib/telegram.py `__ + +Dynamic Monitor/Meter +~~~~~~~~~~~~~~~~~~~~~ + +You can use a ``tqdm`` as a meter which is not monotonically increasing. +This could be because ``n`` decreases (e.g. a CPU usage monitor) or ``total`` +changes. + +One example would be recursively searching for files. The ``total`` is the +number of objects found so far, while ``n`` is the number of those objects which +are files (rather than folders): + +.. code:: python + + from tqdm import tqdm + import os.path + + def find_files_recursively(path, show_progress=True): + files = [] + # total=1 assumes `path` is a file + t = tqdm(total=1, unit="file", disable=not show_progress) + if not os.path.exists(path): + raise IOError("Cannot find:" + path) + + def append_found_file(f): + files.append(f) + t.update() + + def list_found_dir(path): + """returns os.listdir(path) assuming os.path.isdir(path)""" + listing = os.listdir(path) + # subtract 1 since a "file" we found was actually this directory + t.total += len(listing) - 1 + # fancy way to give info without forcing a refresh + t.set_postfix(dir=path[-10:], refresh=False) + t.update(0) # may trigger a refresh + return listing + + def recursively_search(path): + if os.path.isdir(path): + for f in list_found_dir(path): + recursively_search(os.path.join(path, f)) + else: + append_found_file(path) + + recursively_search(path) + t.set_postfix(dir=path) + t.close() + return files + +Using ``update(0)`` is a handy way to let ``tqdm`` decide when to trigger a +display refresh to avoid console spamming. 
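+
+As a complementary minimal sketch of the decreasing-``n`` case mentioned
+above (assuming simulated readings from ``random`` in place of a real CPU
+sensor), passing a signed delta to ``update()`` lets the bar track a value
+that moves both up and down:
+
+.. code:: python
+
+    from random import random
+    from time import sleep
+    from tqdm import tqdm
+
+    with tqdm(total=100, bar_format="{l_bar}{bar}| {n:.0f}%") as t:
+        for _ in range(30):
+            usage = random() * 100   # simulated CPU usage reading, 0-100
+            t.update(usage - t.n)    # signed delta: `n` follows the reading
+            sleep(0.1)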
+
+Writing messages
+~~~~~~~~~~~~~~~~
+
+This is a work in progress (see
+`#737 <https://github.com/tqdm/tqdm/issues/737>`__).
+
+Since ``tqdm`` uses a simple printing mechanism to display progress bars,
+you should not write any message in the terminal using ``print()`` while
+a progressbar is open.
+
+To write messages in the terminal without any collision with ``tqdm`` bar
+display, a ``.write()`` method is provided:
+
+.. code:: python
+
+    from tqdm.auto import tqdm, trange
+    from time import sleep
+
+    bar = trange(10)
+    for i in bar:
+        # Print using tqdm class method .write()
+        sleep(0.1)
+        if not (i % 3):
+            tqdm.write("Done task %i" % i)
+            # Can also use bar.write()
+
+By default, this will print to standard output ``sys.stdout``, but you can
+specify any file-like object using the ``file`` argument. For example, this
+can be used to redirect the messages to a log file or class.
+
+Redirecting writing
+~~~~~~~~~~~~~~~~~~~
+
+If using a library that can print messages to the console, editing the library
+by replacing ``print()`` with ``tqdm.write()`` may not be desirable.
+In that case, redirecting ``sys.stdout`` to ``tqdm.write()`` is an option.
+
+To redirect ``sys.stdout``, create a file-like class that will write
+any input string to ``tqdm.write()``, and supply the arguments
+``file=sys.stdout, dynamic_ncols=True``.
+
+A reusable canonical example is given below:
+
+.. code:: python
+
+    from time import sleep
+    import contextlib
+    import sys
+    from tqdm import tqdm
+    from tqdm.contrib import DummyTqdmFile
+
+
+    @contextlib.contextmanager
+    def std_out_err_redirect_tqdm():
+        orig_out_err = sys.stdout, sys.stderr
+        try:
+            sys.stdout, sys.stderr = map(DummyTqdmFile, orig_out_err)
+            yield orig_out_err[0]
+        # Relay exceptions
+        except Exception as exc:
+            raise exc
+        # Always restore sys.stdout/err if necessary
+        finally:
+            sys.stdout, sys.stderr = orig_out_err
+
+    def some_fun(i):
+        print("Fee, fi, fo,".split()[i])
+
+    # Redirect stdout to tqdm.write() (don't forget the `as orig_stdout`)
+    with std_out_err_redirect_tqdm() as orig_stdout:
+        # tqdm needs the original stdout
+        # and dynamic_ncols=True to autodetect console width
+        for i in tqdm(range(3), file=orig_stdout, dynamic_ncols=True):
+            sleep(.5)
+            some_fun(i)
+
+    # After the `with`, printing is restored
+    print("Done!")
+
+Redirecting ``logging``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Similar to ``sys.stdout``/``sys.stderr`` as detailed above, console ``logging``
+may also be redirected to ``tqdm.write()``.
+
+Warning: if also redirecting ``sys.stdout``/``sys.stderr``, make sure to
+redirect ``logging`` first if needed.
+
+Helper methods are available in ``tqdm.contrib.logging``. For example:
+
+.. code:: python
+
+    import logging
+    from tqdm import trange
+    from tqdm.contrib.logging import logging_redirect_tqdm
+
+    LOG = logging.getLogger(__name__)
+
+    if __name__ == '__main__':
+        logging.basicConfig(level=logging.INFO)
+        with logging_redirect_tqdm():
+            for i in trange(9):
+                if i == 4:
+                    LOG.info("console logging redirected to `tqdm.write()`")
+        # logging restored
+
+Monitoring thread, intervals and miniters
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``tqdm`` implements a few tricks to increase efficiency and reduce overhead.
+
+- Avoid unnecessary frequent bar refreshing: ``mininterval`` defines how long
+  to wait between each refresh. ``tqdm`` always gets updated in the background,
+  but it will display only every ``mininterval``.
+- Reduce number of calls to check system clock/time.
+- ``mininterval`` is more intuitive to configure than ``miniters``.
+  A clever adjustment system ``dynamic_miniters`` will automatically adjust
+  ``miniters`` to the amount of iterations that fit into time ``mininterval``.
+  Essentially, ``tqdm`` will check if it's time to print without actually
+  checking time. This behaviour can still be bypassed by manually setting
+  ``miniters``.
+
+However, consider a case with a combination of fast and slow iterations.
+After a few fast iterations, ``dynamic_miniters`` will set ``miniters`` to a
+large number. When iteration rate subsequently slows, ``miniters`` will
+remain large and thus reduce display update frequency. To address this:
+
+- ``maxinterval`` defines the maximum time between display refreshes.
+  A concurrent monitoring thread checks for overdue updates and forces one
+  where necessary.
+
+The monitoring thread should not have a noticeable overhead, and guarantees
+updates at least every 10 seconds by default.
+This value can be directly changed by setting the ``monitor_interval`` of
+any ``tqdm`` instance (i.e. ``t = tqdm.tqdm(...); t.monitor_interval = 2``).
+The monitor thread may be disabled application-wide by setting
+``tqdm.tqdm.monitor_interval = 0`` before instantiation of any ``tqdm`` bar.
+
+
+Merch
+-----
+
+You can buy `tqdm branded merch <https://tqdm.github.io/merch>`__ now!
+
+Contributions
+-------------
+
+|GitHub-Commits| |GitHub-Issues| |GitHub-PRs| |OpenHub-Status| |GitHub-Contributions| |CII Best Practices|
+
+All source code is hosted on `GitHub <https://github.com/tqdm/tqdm>`__.
+Contributions are welcome.
+
+See the
+`CONTRIBUTING <https://github.com/tqdm/tqdm/blob/master/CONTRIBUTING.md>`__
+file for more information.
+
+Developers who have made significant contributions, ranked by *SLoC*
+(surviving lines of code,
+`git fame <https://github.com/casperdcl/git-fame>`__ ``-wMC --excl '\.(png|gif|jpg)$'``),
+are:
+
+==================== ======================================================== ==== ================================
+Name                 ID                                                       SLoC Notes
+==================== ======================================================== ==== ================================
+Casper da Costa-Luis `casperdcl <https://github.com/casperdcl>`__             ~80% primary maintainer |Gift-Casper|
+Stephen Larroque     `lrq3000 <https://github.com/lrq3000>`__                 ~9%  team member
+Martin Zugnoni       `martinzugnoni <https://github.com/martinzugnoni>`__     ~3%
+Daniel Ecer          `de-code <https://github.com/de-code>`__                 ~2%
+Richard Sheridan     `richardsheridan <https://github.com/richardsheridan>`__ ~1%
+Guangshuo Chen       `chengs <https://github.com/chengs>`__                   ~1%
+Helio Machado        `0x2b3bfa0 <https://github.com/0x2b3bfa0>`__             ~1%
+Kyle Altendorf       `altendky <https://github.com/altendky>`__               <1%
+Noam Yorav-Raphael   `noamraph <https://github.com/noamraph>`__               <1%  original author
+Matthew Stevens      `mjstevens777 <https://github.com/mjstevens777>`__       <1%
+Hadrien Mary         `hadim <https://github.com/hadim>`__                     <1%  team member
+Mikhail Korobov      `kmike <https://github.com/kmike>`__                     <1%  team member
+==================== ======================================================== ==== ================================
+
+Ports to Other Languages
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+A list is available on
+`this wiki page <https://github.com/tqdm/tqdm/wiki/tqdm-ports>`__.
+
+
+LICENCE
+-------
+
+Open Source (OSI approved): |LICENCE|
+
+Citation information: |DOI|
+
+|README-Hits| (Since 19 May 2016)
+
+.. |Logo| image:: https://tqdm.github.io/img/logo.gif
+.. |Screenshot| image:: https://tqdm.github.io/img/tqdm.gif
+.. |Video| image:: https://tqdm.github.io/img/video.jpg
+   :target: https://tqdm.github.io/video
+.. |Slides| image:: https://tqdm.github.io/img/slides.jpg
+   :target: https://tqdm.github.io/PyData2019/slides.html
+.. |Merch| image:: https://tqdm.github.io/img/merch.jpg
+   :target: https://tqdm.github.io/merch
+.. |Build-Status| image:: https://img.shields.io/github/actions/workflow/status/tqdm/tqdm/test.yml?branch=master&label=tqdm&logo=GitHub
+   :target: https://github.com/tqdm/tqdm/actions/workflows/test.yml
+..
|Coverage-Status| image:: https://img.shields.io/coveralls/github/tqdm/tqdm/master?logo=coveralls + :target: https://coveralls.io/github/tqdm/tqdm +.. |Branch-Coverage-Status| image:: https://codecov.io/gh/tqdm/tqdm/branch/master/graph/badge.svg + :target: https://codecov.io/gh/tqdm/tqdm +.. |Codacy-Grade| image:: https://app.codacy.com/project/badge/Grade/3f965571598f44549c7818f29cdcf177 + :target: https://www.codacy.com/gh/tqdm/tqdm/dashboard +.. |CII Best Practices| image:: https://bestpractices.coreinfrastructure.org/projects/3264/badge + :target: https://bestpractices.coreinfrastructure.org/projects/3264 +.. |GitHub-Status| image:: https://img.shields.io/github/tag/tqdm/tqdm.svg?maxAge=86400&logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/releases +.. |GitHub-Forks| image:: https://img.shields.io/github/forks/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/network +.. |GitHub-Stars| image:: https://img.shields.io/github/stars/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/stargazers +.. |GitHub-Commits| image:: https://img.shields.io/github/commit-activity/y/tqdm/tqdm.svg?logo=git&logoColor=white + :target: https://github.com/tqdm/tqdm/graphs/commit-activity +.. |GitHub-Issues| image:: https://img.shields.io/github/issues-closed/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/issues?q= +.. |GitHub-PRs| image:: https://img.shields.io/github/issues-pr-closed/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/pulls +.. |GitHub-Contributions| image:: https://img.shields.io/github/contributors/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/graphs/contributors +.. |GitHub-Updated| image:: https://img.shields.io/github/last-commit/tqdm/tqdm/master.svg?logo=github&logoColor=white&label=pushed + :target: https://github.com/tqdm/tqdm/pulse +.. |Gift-Casper| image:: https://img.shields.io/badge/dynamic/json.svg?color=ff69b4&label=gifts%20received&prefix=%C2%A3&query=%24..sum&url=https%3A%2F%2Fcaspersci.uk.to%2Fgifts.json + :target: https://cdcl.ml/sponsor +.. |Versions| image:: https://img.shields.io/pypi/v/tqdm.svg + :target: https://tqdm.github.io/releases +.. |PyPI-Downloads| image:: https://img.shields.io/pypi/dm/tqdm.svg?label=pypi%20downloads&logo=PyPI&logoColor=white + :target: https://pepy.tech/project/tqdm +.. |Py-Versions| image:: https://img.shields.io/pypi/pyversions/tqdm.svg?logo=python&logoColor=white + :target: https://pypi.org/project/tqdm +.. |Conda-Forge-Status| image:: https://img.shields.io/conda/v/conda-forge/tqdm.svg?label=conda-forge&logo=conda-forge + :target: https://anaconda.org/conda-forge/tqdm +.. |Snapcraft| image:: https://img.shields.io/badge/snap-install-82BEA0.svg?logo=snapcraft + :target: https://snapcraft.io/tqdm +.. |Docker| image:: https://img.shields.io/badge/docker-pull-blue.svg?logo=docker&logoColor=white + :target: https://hub.docker.com/r/tqdm/tqdm +.. |Libraries-Rank| image:: https://img.shields.io/librariesio/sourcerank/pypi/tqdm.svg?logo=koding&logoColor=white + :target: https://libraries.io/pypi/tqdm +.. |Libraries-Dependents| image:: https://img.shields.io/librariesio/dependent-repos/pypi/tqdm.svg?logo=koding&logoColor=white + :target: https://github.com/tqdm/tqdm/network/dependents +.. |OpenHub-Status| image:: https://www.openhub.net/p/tqdm/widgets/project_thin_badge?format=gif + :target: https://www.openhub.net/p/tqdm?ref=Thin+badge +.. 
|awesome-python| image:: https://awesome.re/mentioned-badge.svg + :target: https://github.com/vinta/awesome-python +.. |LICENCE| image:: https://img.shields.io/pypi/l/tqdm.svg + :target: https://raw.githubusercontent.com/tqdm/tqdm/master/LICENCE +.. |DOI| image:: https://img.shields.io/badge/DOI-10.5281/zenodo.595120-blue.svg + :target: https://doi.org/10.5281/zenodo.595120 +.. |binder-demo| image:: https://mybinder.org/badge_logo.svg + :target: https://mybinder.org/v2/gh/tqdm/tqdm/master?filepath=DEMO.ipynb +.. |Screenshot-Jupyter1| image:: https://tqdm.github.io/img/jupyter-1.gif +.. |Screenshot-Jupyter2| image:: https://tqdm.github.io/img/jupyter-2.gif +.. |Screenshot-Jupyter3| image:: https://tqdm.github.io/img/jupyter-3.gif +.. |README-Hits| image:: https://cgi.cdcl.ml/hits?q=tqdm&style=social&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif + :target: https://cgi.cdcl.ml/hits?q=tqdm&a=plot&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif&style=social diff --git a/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/entry_points.txt b/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..540e60f4e073bc53a5f0a521a3639e0d80780af4 --- /dev/null +++ b/venv/lib/python3.13/site-packages/tqdm-4.67.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +tqdm = tqdm.cli:main diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/METADATA b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..c4efc61aaeec05c1655e905e1bae15cc52b8732d --- /dev/null +++ b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/METADATA @@ -0,0 +1,881 @@ +Metadata-Version: 2.4 +Name: transformers +Version: 4.57.2 +Summary: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow +Home-page: https://github.com/huggingface/transformers +Author: The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/transformers/graphs/contributors) +Author-email: transformers@huggingface.co +License: Apache 2.0 License +Keywords: NLP vision speech deep learning transformer pytorch tensorflow jax BERT GPT-2 Wav2Vec2 ViT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.9.0 +Description-Content-Type: 
text/markdown +License-File: LICENSE +Requires-Dist: filelock +Requires-Dist: huggingface-hub<1.0,>=0.34.0 +Requires-Dist: numpy>=1.17 +Requires-Dist: packaging>=20.0 +Requires-Dist: pyyaml>=5.1 +Requires-Dist: regex!=2019.12.17 +Requires-Dist: requests +Requires-Dist: tokenizers<=0.23.0,>=0.22.0 +Requires-Dist: safetensors>=0.4.3 +Requires-Dist: tqdm>=4.27 +Provides-Extra: ja +Requires-Dist: fugashi>=1.0; extra == "ja" +Requires-Dist: ipadic<2.0,>=1.0.0; extra == "ja" +Requires-Dist: unidic_lite>=1.0.7; extra == "ja" +Requires-Dist: unidic>=1.0.2; extra == "ja" +Requires-Dist: sudachipy>=0.6.6; extra == "ja" +Requires-Dist: sudachidict_core>=20220729; extra == "ja" +Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "ja" +Provides-Extra: sklearn +Requires-Dist: scikit-learn; extra == "sklearn" +Provides-Extra: tf +Requires-Dist: tensorflow<2.16,>2.9; extra == "tf" +Requires-Dist: onnxconverter-common; extra == "tf" +Requires-Dist: tf2onnx; extra == "tf" +Requires-Dist: tensorflow-text<2.16; extra == "tf" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "tf" +Provides-Extra: tf-cpu +Requires-Dist: keras<2.16,>2.9; extra == "tf-cpu" +Requires-Dist: tensorflow-cpu<2.16,>2.9; extra == "tf-cpu" +Requires-Dist: onnxconverter-common; extra == "tf-cpu" +Requires-Dist: tf2onnx; extra == "tf-cpu" +Requires-Dist: tensorflow-text<2.16; extra == "tf-cpu" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "tf-cpu" +Requires-Dist: tensorflow-probability<0.24; extra == "tf-cpu" +Provides-Extra: torch +Requires-Dist: torch>=2.2; extra == "torch" +Requires-Dist: accelerate>=0.26.0; extra == "torch" +Provides-Extra: accelerate +Requires-Dist: accelerate>=0.26.0; extra == "accelerate" +Provides-Extra: hf-xet +Requires-Dist: hf_xet; extra == "hf-xet" +Provides-Extra: retrieval +Requires-Dist: faiss-cpu; extra == "retrieval" +Requires-Dist: datasets>=2.15.0; extra == "retrieval" +Provides-Extra: flax +Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "flax" +Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "flax" +Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "flax" +Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "flax" +Requires-Dist: scipy<1.13.0; extra == "flax" +Provides-Extra: tokenizers +Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "tokenizers" +Provides-Extra: ftfy +Requires-Dist: ftfy; extra == "ftfy" +Provides-Extra: onnxruntime +Requires-Dist: onnxruntime>=1.4.0; extra == "onnxruntime" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "onnxruntime" +Provides-Extra: onnx +Requires-Dist: onnxconverter-common; extra == "onnx" +Requires-Dist: tf2onnx; extra == "onnx" +Requires-Dist: onnxruntime>=1.4.0; extra == "onnx" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "onnx" +Provides-Extra: modelcreation +Requires-Dist: cookiecutter==1.7.3; extra == "modelcreation" +Provides-Extra: sagemaker +Requires-Dist: sagemaker>=2.31.0; extra == "sagemaker" +Provides-Extra: deepspeed +Requires-Dist: deepspeed>=0.9.3; extra == "deepspeed" +Requires-Dist: accelerate>=0.26.0; extra == "deepspeed" +Provides-Extra: optuna +Requires-Dist: optuna; extra == "optuna" +Provides-Extra: ray +Requires-Dist: ray[tune]>=2.7.0; extra == "ray" +Provides-Extra: sigopt +Requires-Dist: sigopt; extra == "sigopt" +Provides-Extra: hub-kernels +Requires-Dist: kernels<=0.9,>=0.6.1; extra == "hub-kernels" +Provides-Extra: integrations +Requires-Dist: kernels<=0.9,>=0.6.1; extra == "integrations" +Requires-Dist: optuna; extra == "integrations" +Requires-Dist: ray[tune]>=2.7.0; extra == "integrations" +Provides-Extra: serving +Requires-Dist: 
openai>=1.98.0; extra == "serving" +Requires-Dist: pydantic>=2; extra == "serving" +Requires-Dist: uvicorn; extra == "serving" +Requires-Dist: fastapi; extra == "serving" +Requires-Dist: starlette; extra == "serving" +Requires-Dist: torch>=2.2; extra == "serving" +Requires-Dist: accelerate>=0.26.0; extra == "serving" +Provides-Extra: audio +Requires-Dist: librosa; extra == "audio" +Requires-Dist: pyctcdecode>=0.4.0; extra == "audio" +Requires-Dist: phonemizer; extra == "audio" +Requires-Dist: kenlm; extra == "audio" +Provides-Extra: speech +Requires-Dist: torchaudio; extra == "speech" +Requires-Dist: librosa; extra == "speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "speech" +Requires-Dist: phonemizer; extra == "speech" +Requires-Dist: kenlm; extra == "speech" +Provides-Extra: torch-speech +Requires-Dist: torchaudio; extra == "torch-speech" +Requires-Dist: librosa; extra == "torch-speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "torch-speech" +Requires-Dist: phonemizer; extra == "torch-speech" +Requires-Dist: kenlm; extra == "torch-speech" +Provides-Extra: tf-speech +Requires-Dist: librosa; extra == "tf-speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "tf-speech" +Requires-Dist: phonemizer; extra == "tf-speech" +Requires-Dist: kenlm; extra == "tf-speech" +Provides-Extra: flax-speech +Requires-Dist: librosa; extra == "flax-speech" +Requires-Dist: pyctcdecode>=0.4.0; extra == "flax-speech" +Requires-Dist: phonemizer; extra == "flax-speech" +Requires-Dist: kenlm; extra == "flax-speech" +Provides-Extra: vision +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "vision" +Provides-Extra: timm +Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "timm" +Provides-Extra: torch-vision +Requires-Dist: torchvision; extra == "torch-vision" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "torch-vision" +Provides-Extra: natten +Requires-Dist: natten<0.15.0,>=0.14.6; extra == "natten" +Provides-Extra: codecarbon +Requires-Dist: codecarbon>=2.8.1; extra == "codecarbon" +Provides-Extra: video +Requires-Dist: av; extra == "video" +Provides-Extra: num2words +Requires-Dist: num2words; extra == "num2words" +Provides-Extra: sentencepiece +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "sentencepiece" +Requires-Dist: protobuf; extra == "sentencepiece" +Provides-Extra: tiktoken +Requires-Dist: tiktoken; extra == "tiktoken" +Requires-Dist: blobfile; extra == "tiktoken" +Provides-Extra: mistral-common +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "mistral-common" +Provides-Extra: chat-template +Requires-Dist: jinja2>=3.1.0; extra == "chat-template" +Provides-Extra: testing +Requires-Dist: pytest>=7.2.0; extra == "testing" +Requires-Dist: pytest-asyncio; extra == "testing" +Requires-Dist: pytest-rich; extra == "testing" +Requires-Dist: pytest-xdist; extra == "testing" +Requires-Dist: pytest-order; extra == "testing" +Requires-Dist: pytest-rerunfailures<16.0; extra == "testing" +Requires-Dist: timeout-decorator; extra == "testing" +Requires-Dist: parameterized>=0.9; extra == "testing" +Requires-Dist: psutil; extra == "testing" +Requires-Dist: datasets>=2.15.0; extra == "testing" +Requires-Dist: dill<0.3.5; extra == "testing" +Requires-Dist: evaluate>=0.2.0; extra == "testing" +Requires-Dist: pytest-timeout; extra == "testing" +Requires-Dist: ruff==0.13.1; extra == "testing" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "testing" +Requires-Dist: nltk<=3.8.1; extra == "testing" +Requires-Dist: GitPython<3.1.19; extra == "testing" +Requires-Dist: sacremoses; extra == 
"testing" +Requires-Dist: rjieba; extra == "testing" +Requires-Dist: beautifulsoup4; extra == "testing" +Requires-Dist: tensorboard; extra == "testing" +Requires-Dist: pydantic>=2; extra == "testing" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "testing" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "testing" +Requires-Dist: libcst; extra == "testing" +Requires-Dist: faiss-cpu; extra == "testing" +Requires-Dist: datasets>=2.15.0; extra == "testing" +Requires-Dist: cookiecutter==1.7.3; extra == "testing" +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "testing" +Requires-Dist: openai>=1.98.0; extra == "testing" +Requires-Dist: pydantic>=2; extra == "testing" +Requires-Dist: uvicorn; extra == "testing" +Requires-Dist: fastapi; extra == "testing" +Requires-Dist: starlette; extra == "testing" +Requires-Dist: torch>=2.2; extra == "testing" +Requires-Dist: accelerate>=0.26.0; extra == "testing" +Provides-Extra: deepspeed-testing +Requires-Dist: deepspeed>=0.9.3; extra == "deepspeed-testing" +Requires-Dist: accelerate>=0.26.0; extra == "deepspeed-testing" +Requires-Dist: pytest>=7.2.0; extra == "deepspeed-testing" +Requires-Dist: pytest-asyncio; extra == "deepspeed-testing" +Requires-Dist: pytest-rich; extra == "deepspeed-testing" +Requires-Dist: pytest-xdist; extra == "deepspeed-testing" +Requires-Dist: pytest-order; extra == "deepspeed-testing" +Requires-Dist: pytest-rerunfailures<16.0; extra == "deepspeed-testing" +Requires-Dist: timeout-decorator; extra == "deepspeed-testing" +Requires-Dist: parameterized>=0.9; extra == "deepspeed-testing" +Requires-Dist: psutil; extra == "deepspeed-testing" +Requires-Dist: datasets>=2.15.0; extra == "deepspeed-testing" +Requires-Dist: dill<0.3.5; extra == "deepspeed-testing" +Requires-Dist: evaluate>=0.2.0; extra == "deepspeed-testing" +Requires-Dist: pytest-timeout; extra == "deepspeed-testing" +Requires-Dist: ruff==0.13.1; extra == "deepspeed-testing" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "deepspeed-testing" +Requires-Dist: nltk<=3.8.1; extra == "deepspeed-testing" +Requires-Dist: GitPython<3.1.19; extra == "deepspeed-testing" +Requires-Dist: sacremoses; extra == "deepspeed-testing" +Requires-Dist: rjieba; extra == "deepspeed-testing" +Requires-Dist: beautifulsoup4; extra == "deepspeed-testing" +Requires-Dist: tensorboard; extra == "deepspeed-testing" +Requires-Dist: pydantic>=2; extra == "deepspeed-testing" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "deepspeed-testing" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "deepspeed-testing" +Requires-Dist: libcst; extra == "deepspeed-testing" +Requires-Dist: faiss-cpu; extra == "deepspeed-testing" +Requires-Dist: datasets>=2.15.0; extra == "deepspeed-testing" +Requires-Dist: cookiecutter==1.7.3; extra == "deepspeed-testing" +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "deepspeed-testing" +Requires-Dist: openai>=1.98.0; extra == "deepspeed-testing" +Requires-Dist: pydantic>=2; extra == "deepspeed-testing" +Requires-Dist: uvicorn; extra == "deepspeed-testing" +Requires-Dist: fastapi; extra == "deepspeed-testing" +Requires-Dist: starlette; extra == "deepspeed-testing" +Requires-Dist: torch>=2.2; extra == "deepspeed-testing" +Requires-Dist: accelerate>=0.26.0; extra == "deepspeed-testing" +Requires-Dist: optuna; extra == "deepspeed-testing" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "deepspeed-testing" +Requires-Dist: protobuf; extra == "deepspeed-testing" +Provides-Extra: ruff +Requires-Dist: ruff==0.13.1; extra == 
"ruff" +Provides-Extra: quality +Requires-Dist: datasets>=2.15.0; extra == "quality" +Requires-Dist: ruff==0.13.1; extra == "quality" +Requires-Dist: GitPython<3.1.19; extra == "quality" +Requires-Dist: urllib3<2.0.0; extra == "quality" +Requires-Dist: libcst; extra == "quality" +Requires-Dist: rich; extra == "quality" +Requires-Dist: pandas<2.3.0; extra == "quality" +Provides-Extra: all +Requires-Dist: tensorflow<2.16,>2.9; extra == "all" +Requires-Dist: onnxconverter-common; extra == "all" +Requires-Dist: tf2onnx; extra == "all" +Requires-Dist: tensorflow-text<2.16; extra == "all" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "all" +Requires-Dist: torch>=2.2; extra == "all" +Requires-Dist: accelerate>=0.26.0; extra == "all" +Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "all" +Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "all" +Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "all" +Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "all" +Requires-Dist: scipy<1.13.0; extra == "all" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "all" +Requires-Dist: protobuf; extra == "all" +Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "all" +Requires-Dist: torchaudio; extra == "all" +Requires-Dist: librosa; extra == "all" +Requires-Dist: pyctcdecode>=0.4.0; extra == "all" +Requires-Dist: phonemizer; extra == "all" +Requires-Dist: kenlm; extra == "all" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "all" +Requires-Dist: kernels<=0.9,>=0.6.1; extra == "all" +Requires-Dist: optuna; extra == "all" +Requires-Dist: ray[tune]>=2.7.0; extra == "all" +Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "all" +Requires-Dist: torchvision; extra == "all" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "all" +Requires-Dist: codecarbon>=2.8.1; extra == "all" +Requires-Dist: accelerate>=0.26.0; extra == "all" +Requires-Dist: av; extra == "all" +Requires-Dist: num2words; extra == "all" +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "all" +Requires-Dist: jinja2>=3.1.0; extra == "all" +Provides-Extra: dev-torch +Requires-Dist: pytest>=7.2.0; extra == "dev-torch" +Requires-Dist: pytest-asyncio; extra == "dev-torch" +Requires-Dist: pytest-rich; extra == "dev-torch" +Requires-Dist: pytest-xdist; extra == "dev-torch" +Requires-Dist: pytest-order; extra == "dev-torch" +Requires-Dist: pytest-rerunfailures<16.0; extra == "dev-torch" +Requires-Dist: timeout-decorator; extra == "dev-torch" +Requires-Dist: parameterized>=0.9; extra == "dev-torch" +Requires-Dist: psutil; extra == "dev-torch" +Requires-Dist: datasets>=2.15.0; extra == "dev-torch" +Requires-Dist: dill<0.3.5; extra == "dev-torch" +Requires-Dist: evaluate>=0.2.0; extra == "dev-torch" +Requires-Dist: pytest-timeout; extra == "dev-torch" +Requires-Dist: ruff==0.13.1; extra == "dev-torch" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev-torch" +Requires-Dist: nltk<=3.8.1; extra == "dev-torch" +Requires-Dist: GitPython<3.1.19; extra == "dev-torch" +Requires-Dist: sacremoses; extra == "dev-torch" +Requires-Dist: rjieba; extra == "dev-torch" +Requires-Dist: beautifulsoup4; extra == "dev-torch" +Requires-Dist: tensorboard; extra == "dev-torch" +Requires-Dist: pydantic>=2; extra == "dev-torch" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-torch" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev-torch" +Requires-Dist: libcst; extra == "dev-torch" +Requires-Dist: faiss-cpu; extra == "dev-torch" +Requires-Dist: datasets>=2.15.0; extra == "dev-torch" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-torch" 
+Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev-torch" +Requires-Dist: openai>=1.98.0; extra == "dev-torch" +Requires-Dist: pydantic>=2; extra == "dev-torch" +Requires-Dist: uvicorn; extra == "dev-torch" +Requires-Dist: fastapi; extra == "dev-torch" +Requires-Dist: starlette; extra == "dev-torch" +Requires-Dist: torch>=2.2; extra == "dev-torch" +Requires-Dist: accelerate>=0.26.0; extra == "dev-torch" +Requires-Dist: torch>=2.2; extra == "dev-torch" +Requires-Dist: accelerate>=0.26.0; extra == "dev-torch" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-torch" +Requires-Dist: protobuf; extra == "dev-torch" +Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "dev-torch" +Requires-Dist: torchaudio; extra == "dev-torch" +Requires-Dist: librosa; extra == "dev-torch" +Requires-Dist: pyctcdecode>=0.4.0; extra == "dev-torch" +Requires-Dist: phonemizer; extra == "dev-torch" +Requires-Dist: kenlm; extra == "dev-torch" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-torch" +Requires-Dist: kernels<=0.9,>=0.6.1; extra == "dev-torch" +Requires-Dist: optuna; extra == "dev-torch" +Requires-Dist: ray[tune]>=2.7.0; extra == "dev-torch" +Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "dev-torch" +Requires-Dist: torchvision; extra == "dev-torch" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-torch" +Requires-Dist: codecarbon>=2.8.1; extra == "dev-torch" +Requires-Dist: datasets>=2.15.0; extra == "dev-torch" +Requires-Dist: ruff==0.13.1; extra == "dev-torch" +Requires-Dist: GitPython<3.1.19; extra == "dev-torch" +Requires-Dist: urllib3<2.0.0; extra == "dev-torch" +Requires-Dist: libcst; extra == "dev-torch" +Requires-Dist: rich; extra == "dev-torch" +Requires-Dist: pandas<2.3.0; extra == "dev-torch" +Requires-Dist: fugashi>=1.0; extra == "dev-torch" +Requires-Dist: ipadic<2.0,>=1.0.0; extra == "dev-torch" +Requires-Dist: unidic_lite>=1.0.7; extra == "dev-torch" +Requires-Dist: unidic>=1.0.2; extra == "dev-torch" +Requires-Dist: sudachipy>=0.6.6; extra == "dev-torch" +Requires-Dist: sudachidict_core>=20220729; extra == "dev-torch" +Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "dev-torch" +Requires-Dist: scikit-learn; extra == "dev-torch" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-torch" +Requires-Dist: onnxruntime>=1.4.0; extra == "dev-torch" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "dev-torch" +Requires-Dist: num2words; extra == "dev-torch" +Provides-Extra: dev-tensorflow +Requires-Dist: pytest>=7.2.0; extra == "dev-tensorflow" +Requires-Dist: pytest-asyncio; extra == "dev-tensorflow" +Requires-Dist: pytest-rich; extra == "dev-tensorflow" +Requires-Dist: pytest-xdist; extra == "dev-tensorflow" +Requires-Dist: pytest-order; extra == "dev-tensorflow" +Requires-Dist: pytest-rerunfailures<16.0; extra == "dev-tensorflow" +Requires-Dist: timeout-decorator; extra == "dev-tensorflow" +Requires-Dist: parameterized>=0.9; extra == "dev-tensorflow" +Requires-Dist: psutil; extra == "dev-tensorflow" +Requires-Dist: datasets>=2.15.0; extra == "dev-tensorflow" +Requires-Dist: dill<0.3.5; extra == "dev-tensorflow" +Requires-Dist: evaluate>=0.2.0; extra == "dev-tensorflow" +Requires-Dist: pytest-timeout; extra == "dev-tensorflow" +Requires-Dist: ruff==0.13.1; extra == "dev-tensorflow" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev-tensorflow" +Requires-Dist: nltk<=3.8.1; extra == "dev-tensorflow" +Requires-Dist: GitPython<3.1.19; extra == "dev-tensorflow" +Requires-Dist: sacremoses; extra == "dev-tensorflow" +Requires-Dist: rjieba; extra == 
"dev-tensorflow" +Requires-Dist: beautifulsoup4; extra == "dev-tensorflow" +Requires-Dist: tensorboard; extra == "dev-tensorflow" +Requires-Dist: pydantic>=2; extra == "dev-tensorflow" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-tensorflow" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev-tensorflow" +Requires-Dist: libcst; extra == "dev-tensorflow" +Requires-Dist: faiss-cpu; extra == "dev-tensorflow" +Requires-Dist: datasets>=2.15.0; extra == "dev-tensorflow" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-tensorflow" +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev-tensorflow" +Requires-Dist: openai>=1.98.0; extra == "dev-tensorflow" +Requires-Dist: pydantic>=2; extra == "dev-tensorflow" +Requires-Dist: uvicorn; extra == "dev-tensorflow" +Requires-Dist: fastapi; extra == "dev-tensorflow" +Requires-Dist: starlette; extra == "dev-tensorflow" +Requires-Dist: torch>=2.2; extra == "dev-tensorflow" +Requires-Dist: accelerate>=0.26.0; extra == "dev-tensorflow" +Requires-Dist: tensorflow<2.16,>2.9; extra == "dev-tensorflow" +Requires-Dist: onnxconverter-common; extra == "dev-tensorflow" +Requires-Dist: tf2onnx; extra == "dev-tensorflow" +Requires-Dist: tensorflow-text<2.16; extra == "dev-tensorflow" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "dev-tensorflow" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev-tensorflow" +Requires-Dist: protobuf; extra == "dev-tensorflow" +Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "dev-tensorflow" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev-tensorflow" +Requires-Dist: datasets>=2.15.0; extra == "dev-tensorflow" +Requires-Dist: ruff==0.13.1; extra == "dev-tensorflow" +Requires-Dist: GitPython<3.1.19; extra == "dev-tensorflow" +Requires-Dist: urllib3<2.0.0; extra == "dev-tensorflow" +Requires-Dist: libcst; extra == "dev-tensorflow" +Requires-Dist: rich; extra == "dev-tensorflow" +Requires-Dist: pandas<2.3.0; extra == "dev-tensorflow" +Requires-Dist: scikit-learn; extra == "dev-tensorflow" +Requires-Dist: cookiecutter==1.7.3; extra == "dev-tensorflow" +Requires-Dist: onnxconverter-common; extra == "dev-tensorflow" +Requires-Dist: tf2onnx; extra == "dev-tensorflow" +Requires-Dist: onnxruntime>=1.4.0; extra == "dev-tensorflow" +Requires-Dist: onnxruntime-tools>=1.4.2; extra == "dev-tensorflow" +Requires-Dist: librosa; extra == "dev-tensorflow" +Requires-Dist: pyctcdecode>=0.4.0; extra == "dev-tensorflow" +Requires-Dist: phonemizer; extra == "dev-tensorflow" +Requires-Dist: kenlm; extra == "dev-tensorflow" +Provides-Extra: dev +Requires-Dist: tensorflow<2.16,>2.9; extra == "dev" +Requires-Dist: onnxconverter-common; extra == "dev" +Requires-Dist: tf2onnx; extra == "dev" +Requires-Dist: tensorflow-text<2.16; extra == "dev" +Requires-Dist: keras-nlp<0.14.0,>=0.3.1; extra == "dev" +Requires-Dist: torch>=2.2; extra == "dev" +Requires-Dist: accelerate>=0.26.0; extra == "dev" +Requires-Dist: jax<=0.4.13,>=0.4.1; extra == "dev" +Requires-Dist: jaxlib<=0.4.13,>=0.4.1; extra == "dev" +Requires-Dist: flax<=0.7.0,>=0.4.1; extra == "dev" +Requires-Dist: optax<=0.1.4,>=0.0.8; extra == "dev" +Requires-Dist: scipy<1.13.0; extra == "dev" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev" +Requires-Dist: protobuf; extra == "dev" +Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "dev" +Requires-Dist: torchaudio; extra == "dev" +Requires-Dist: librosa; extra == "dev" +Requires-Dist: pyctcdecode>=0.4.0; extra == "dev" +Requires-Dist: phonemizer; extra == "dev" +Requires-Dist: kenlm; extra == 
"dev" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev" +Requires-Dist: kernels<=0.9,>=0.6.1; extra == "dev" +Requires-Dist: optuna; extra == "dev" +Requires-Dist: ray[tune]>=2.7.0; extra == "dev" +Requires-Dist: timm!=1.0.18,<=1.0.19; extra == "dev" +Requires-Dist: torchvision; extra == "dev" +Requires-Dist: Pillow<=15.0,>=10.0.1; extra == "dev" +Requires-Dist: codecarbon>=2.8.1; extra == "dev" +Requires-Dist: accelerate>=0.26.0; extra == "dev" +Requires-Dist: av; extra == "dev" +Requires-Dist: num2words; extra == "dev" +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev" +Requires-Dist: jinja2>=3.1.0; extra == "dev" +Requires-Dist: pytest>=7.2.0; extra == "dev" +Requires-Dist: pytest-asyncio; extra == "dev" +Requires-Dist: pytest-rich; extra == "dev" +Requires-Dist: pytest-xdist; extra == "dev" +Requires-Dist: pytest-order; extra == "dev" +Requires-Dist: pytest-rerunfailures<16.0; extra == "dev" +Requires-Dist: timeout-decorator; extra == "dev" +Requires-Dist: parameterized>=0.9; extra == "dev" +Requires-Dist: psutil; extra == "dev" +Requires-Dist: datasets>=2.15.0; extra == "dev" +Requires-Dist: dill<0.3.5; extra == "dev" +Requires-Dist: evaluate>=0.2.0; extra == "dev" +Requires-Dist: pytest-timeout; extra == "dev" +Requires-Dist: ruff==0.13.1; extra == "dev" +Requires-Dist: rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1; extra == "dev" +Requires-Dist: nltk<=3.8.1; extra == "dev" +Requires-Dist: GitPython<3.1.19; extra == "dev" +Requires-Dist: sacremoses; extra == "dev" +Requires-Dist: rjieba; extra == "dev" +Requires-Dist: beautifulsoup4; extra == "dev" +Requires-Dist: tensorboard; extra == "dev" +Requires-Dist: pydantic>=2; extra == "dev" +Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "dev" +Requires-Dist: sacrebleu<2.0.0,>=1.4.12; extra == "dev" +Requires-Dist: libcst; extra == "dev" +Requires-Dist: faiss-cpu; extra == "dev" +Requires-Dist: datasets>=2.15.0; extra == "dev" +Requires-Dist: cookiecutter==1.7.3; extra == "dev" +Requires-Dist: mistral-common[opencv]>=1.6.3; extra == "dev" +Requires-Dist: openai>=1.98.0; extra == "dev" +Requires-Dist: pydantic>=2; extra == "dev" +Requires-Dist: uvicorn; extra == "dev" +Requires-Dist: fastapi; extra == "dev" +Requires-Dist: starlette; extra == "dev" +Requires-Dist: torch>=2.2; extra == "dev" +Requires-Dist: accelerate>=0.26.0; extra == "dev" +Requires-Dist: datasets>=2.15.0; extra == "dev" +Requires-Dist: ruff==0.13.1; extra == "dev" +Requires-Dist: GitPython<3.1.19; extra == "dev" +Requires-Dist: urllib3<2.0.0; extra == "dev" +Requires-Dist: libcst; extra == "dev" +Requires-Dist: rich; extra == "dev" +Requires-Dist: pandas<2.3.0; extra == "dev" +Requires-Dist: fugashi>=1.0; extra == "dev" +Requires-Dist: ipadic<2.0,>=1.0.0; extra == "dev" +Requires-Dist: unidic_lite>=1.0.7; extra == "dev" +Requires-Dist: unidic>=1.0.2; extra == "dev" +Requires-Dist: sudachipy>=0.6.6; extra == "dev" +Requires-Dist: sudachidict_core>=20220729; extra == "dev" +Requires-Dist: rhoknp<1.3.1,>=1.1.0; extra == "dev" +Requires-Dist: scikit-learn; extra == "dev" +Requires-Dist: cookiecutter==1.7.3; extra == "dev" +Provides-Extra: torchhub +Requires-Dist: filelock; extra == "torchhub" +Requires-Dist: huggingface-hub<1.0,>=0.34.0; extra == "torchhub" +Requires-Dist: importlib_metadata; extra == "torchhub" +Requires-Dist: numpy>=1.17; extra == "torchhub" +Requires-Dist: packaging>=20.0; extra == "torchhub" +Requires-Dist: protobuf; extra == "torchhub" +Requires-Dist: regex!=2019.12.17; extra == "torchhub" +Requires-Dist: requests; extra == "torchhub" 
+Requires-Dist: sentencepiece!=0.1.92,>=0.1.91; extra == "torchhub" +Requires-Dist: torch>=2.2; extra == "torchhub" +Requires-Dist: tokenizers<=0.23.0,>=0.22.0; extra == "torchhub" +Requires-Dist: tqdm>=4.27; extra == "torchhub" +Provides-Extra: benchmark +Requires-Dist: optimum-benchmark>=0.3.0; extra == "benchmark" +Provides-Extra: open-telemetry +Requires-Dist: opentelemetry-api; extra == "open-telemetry" +Requires-Dist: opentelemetry-exporter-otlp; extra == "open-telemetry" +Requires-Dist: opentelemetry-sdk; extra == "open-telemetry" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: provides-extra +Dynamic: requires-dist +Dynamic: requires-python +Dynamic: summary + + + +

+Hugging Face Transformers Library
+
+Badges: Checkpoints on Hub | Build | GitHub | Documentation | GitHub release | Contributor Covenant | DOI
+
+English | 简体中文 | 繁體中文 | 한국어 | Español | 日本語 | हिन्दी | Русский | Português | తెలుగు | Français | Deutsch | Italiano | Tiếng Việt | العربية | اردو | বাংলা
+
+State-of-the-art pretrained models for inference and training

+
+Transformers acts as the model-definition framework for state-of-the-art machine learning models in text, computer
+vision, audio, video, and multimodal models, for both inference and training.
+
+It centralizes the model definition so that this definition is agreed upon across the ecosystem. `transformers` is the
+pivot across frameworks: if a model definition is supported, it will be compatible with the majority of training
+frameworks (Axolotl, Unsloth, DeepSpeed, FSDP, PyTorch-Lightning, ...), inference engines (vLLM, SGLang, TGI, ...),
+and adjacent modeling libraries (llama.cpp, mlx, ...) which leverage the model definition from `transformers`.
+
+We pledge to help support new state-of-the-art models and democratize their usage by having their model definition be
+simple, customizable, and efficient.
+
+There are over 1M Transformers [model checkpoints](https://huggingface.co/models?library=transformers&sort=trending) on the [Hugging Face Hub](https://huggingface.com/models) you can use.
+
+Explore the [Hub](https://huggingface.com/) today to find a model and use Transformers to help you get started right away.
+
+## Installation
+
+Transformers works with Python 3.9+, [PyTorch](https://pytorch.org/get-started/locally/) 2.1+, [TensorFlow](https://www.tensorflow.org/install/pip) 2.6+, and [Flax](https://flax.readthedocs.io/en/latest/) 0.4.1+.
+
+Create and activate a virtual environment with [venv](https://docs.python.org/3/library/venv.html) or [uv](https://docs.astral.sh/uv/), a fast Rust-based Python package and project manager.
+
+```shell
+# venv
+python -m venv .my-env
+source .my-env/bin/activate
+# uv
+uv venv .my-env
+source .my-env/bin/activate
+```
+
+Install Transformers in your virtual environment.
+
+```shell
+# pip
+pip install "transformers[torch]"
+
+# uv
+uv pip install "transformers[torch]"
+```
+
+Install Transformers from source if you want the latest changes in the library or are interested in contributing. However, the *latest* version may not be stable. Feel free to open an [issue](https://github.com/huggingface/transformers/issues) if you encounter an error.
+
+```shell
+git clone https://github.com/huggingface/transformers.git
+cd transformers
+
+# pip
+pip install '.[torch]'
+
+# uv
+uv pip install '.[torch]'
+```
+
+## Quickstart
+
+Get started with Transformers right away with the [Pipeline](https://huggingface.co/docs/transformers/pipeline_tutorial) API. The `Pipeline` is a high-level inference class that supports text, audio, vision, and multimodal tasks. It handles preprocessing the input and returns the appropriate output.
+
+Instantiate a pipeline and specify the model to use for text generation. The model is downloaded and cached so you can easily reuse it. Finally, pass some text to prompt the model.
+
+```py
+from transformers import pipeline
+
+pipeline = pipeline(task="text-generation", model="Qwen/Qwen2.5-1.5B")
+pipeline("the secret to baking a really good cake is ")
+[{'generated_text': 'the secret to baking a really good cake is 1) to use the right ingredients and 2) to follow the recipe exactly. the recipe for the cake is as follows: 1 cup of sugar, 1 cup of flour, 1 cup of milk, 1 cup of butter, 1 cup of eggs, 1 cup of chocolate chips. if you want to make 2 cakes, how much sugar do you need? To make 2 cakes, you will need 2 cups of sugar.'}]
+```
+
+To chat with a model, the usage pattern is the same. The only difference is that you need to construct a chat history (the input to `Pipeline`) between you and the system.
+
+> [!TIP]
+> You can also chat with a model directly from the command line.
+> ```shell
+> transformers chat Qwen/Qwen2.5-0.5B-Instruct
+> ```
+
+```py
+import torch
+from transformers import pipeline
+
+chat = [
+    {"role": "system", "content": "You are a sassy, wise-cracking robot as imagined by Hollywood circa 1986."},
+    {"role": "user", "content": "Hey, can you tell me any fun things to do in New York?"}
+]
+
+pipeline = pipeline(task="text-generation", model="meta-llama/Meta-Llama-3-8B-Instruct", dtype=torch.bfloat16, device_map="auto")
+response = pipeline(chat, max_new_tokens=512)
+print(response[0]["generated_text"][-1]["content"])
+```
+
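+Under the hood, `Pipeline` applies the model's chat template and calls `generate()`. If you want that control yourself, the sketch below shows a minimal equivalent lower-level flow; the checkpoint and generation settings are illustrative assumptions, not the only way to do this.
+
+```py
+import torch
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+# Illustrative checkpoint; any chat-tuned causal LM with a chat template works the same way.
+tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-0.5B-Instruct")
+model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-0.5B-Instruct", dtype=torch.bfloat16, device_map="auto")
+
+chat = [{"role": "user", "content": "Hey, can you tell me any fun things to do in New York?"}]
+
+# Render the chat into model-ready token ids with the model's chat template.
+inputs = tokenizer.apply_chat_template(chat, add_generation_prompt=True, return_tensors="pt").to(model.device)
+outputs = model.generate(inputs, max_new_tokens=128)
+
+# Decode only the newly generated tokens, skipping the prompt.
+print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
+```
+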
+Expand the examples below to see how `Pipeline` works for different modalities and tasks.
+
+<details>
+<summary>Automatic speech recognition</summary>
+
+```py
+from transformers import pipeline
+
+pipeline = pipeline(task="automatic-speech-recognition", model="openai/whisper-large-v3")
+pipeline("https://huggingface.co/datasets/Narsil/asr_dummy/resolve/main/mlk.flac")
+{'text': ' I have a dream that one day this nation will rise up and live out the true meaning of its creed.'}
+```
+
+</details>
+ +
+<details>
+<summary>Image classification</summary>
+
+```py
+from transformers import pipeline
+
+pipeline = pipeline(task="image-classification", model="facebook/dinov2-small-imagenet1k-1-layer")
+pipeline("https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png")
+[{'label': 'macaw', 'score': 0.997848391532898},
+ {'label': 'sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita',
+  'score': 0.0016551691805943847},
+ {'label': 'lorikeet', 'score': 0.00018523589824326336},
+ {'label': 'African grey, African gray, Psittacus erithacus',
+  'score': 7.85409429227002e-05},
+ {'label': 'quail', 'score': 5.502637941390276e-05}]
+```
+
+</details>
+ +
+<details>
+<summary>Visual question answering</summary>
+
+```py
+from transformers import pipeline
+
+pipeline = pipeline(task="visual-question-answering", model="Salesforce/blip-vqa-base")
+pipeline(
+    image="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/idefics-few-shot.jpg",
+    question="What is in the image?",
+)
+[{'answer': 'statue of liberty'}]
+```
+
+</details>
+
+## Why should I use Transformers?
+
+1. Easy-to-use state-of-the-art models:
+    - High performance on natural language understanding & generation, computer vision, audio, video, and multimodal tasks.
+    - Low barrier to entry for researchers, engineers, and developers.
+    - Few user-facing abstractions with just three classes to learn.
+    - A unified API for using all our pretrained models.
+
+1. Lower compute costs, smaller carbon footprint:
+    - Share trained models instead of training from scratch.
+    - Reduce compute time and production costs.
+    - Dozens of model architectures with 1M+ pretrained checkpoints across all modalities.
+
+1. Choose the right framework for every part of a model's lifetime:
+    - Train state-of-the-art models in 3 lines of code (see the sketch below).
+    - Move a single model between PyTorch/JAX/TF2.0 frameworks at will.
+    - Pick the right framework for training, evaluation, and production.
+
+1. Easily customize a model or an example to your needs:
+    - We provide examples for each architecture to reproduce the results published by its original authors.
+    - Model internals are exposed as consistently as possible.
+    - Model files can be used independently of the library for quick experiments.
+
+Hugging Face Enterprise Hub
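+To make the "3 lines of code" bullet above concrete, here is a minimal fine-tuning sketch with the `Trainer` API. The checkpoint, dataset, and hyperparameters are illustrative assumptions, not recommendations.
+
+```py
+from datasets import load_dataset
+from transformers import AutoModelForSequenceClassification, AutoTokenizer, Trainer, TrainingArguments
+
+# Illustrative checkpoint and dataset (assumptions for the sketch).
+tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
+model = AutoModelForSequenceClassification.from_pretrained("distilbert-base-uncased", num_labels=2)
+
+# Tokenize a small sentiment dataset; fixed-length padding keeps the default collator happy.
+dataset = load_dataset("rotten_tomatoes")
+dataset = dataset.map(lambda batch: tokenizer(batch["text"], truncation=True, padding="max_length"), batched=True)
+
+# The "3 lines": build a Trainer and train.
+trainer = Trainer(
+    model=model,
+    args=TrainingArguments(output_dir="out", num_train_epochs=1),
+    train_dataset=dataset["train"],
+)
+trainer.train()
+```
+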
+
+## Why shouldn't I use Transformers?
+
+- This library is not a modular toolbox of building blocks for neural nets. The code in the model files is not refactored with additional abstractions on purpose, so that researchers can quickly iterate on each of the models without diving into additional abstractions/files.
+- The training API is optimized to work with PyTorch models provided by Transformers. For generic machine learning loops, you should use another library like [Accelerate](https://huggingface.co/docs/accelerate).
+- The [example scripts](https://github.com/huggingface/transformers/tree/main/examples) are only *examples*. They won't necessarily work out-of-the-box on your specific use case, and you'll need to adapt the code for it to work.
+
+## 100 projects using Transformers
+
+Transformers is more than a toolkit for using pretrained models: it's a community of projects built around it and the
+Hugging Face Hub. We want Transformers to enable developers, researchers, students, professors, engineers, and anyone
+else to build their dream projects.
+
+To celebrate Transformers reaching 100,000 stars, we wanted to put the spotlight on the
+community with the [awesome-transformers](./awesome-transformers.md) page, which lists 100
+incredible projects built with Transformers.
+
+If you own or use a project that you believe should be part of the list, please open a PR to add it!
+
+## Example models
+
+You can test most of our models directly on their [Hub model pages](https://huggingface.co/models).
+
+Expand each modality below to see a few example models for various use cases.
+
+<details>
+<summary>Audio</summary>
+
+- Audio classification with [Whisper](https://huggingface.co/openai/whisper-large-v3-turbo)
+- Automatic speech recognition with [Moonshine](https://huggingface.co/UsefulSensors/moonshine)
+- Keyword spotting with [Wav2Vec2](https://huggingface.co/superb/wav2vec2-base-superb-ks)
+- Speech to speech generation with [Moshi](https://huggingface.co/kyutai/moshiko-pytorch-bf16)
+- Text to audio with [MusicGen](https://huggingface.co/facebook/musicgen-large)
+- Text to speech with [Bark](https://huggingface.co/suno/bark) (sketched below)
+
+</details>
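+All of these can be driven through the same `Pipeline` API. A minimal text-to-speech sketch, assuming the `text-to-speech` pipeline task and the Bark checkpoint listed above (the output keys follow the pipeline's usual audio dict):
+
+```py
+from transformers import pipeline
+
+# Minimal text-to-speech sketch (checkpoint from the list above).
+tts = pipeline(task="text-to-speech", model="suno/bark")
+speech = tts("Hello, this sentence was spoken by Bark.")
+# `speech` holds the waveform and its sampling rate,
+# e.g. speech["audio"] and speech["sampling_rate"].
+```
+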
+ +
+<details>
+<summary>Computer vision</summary>
+
+- Automatic mask generation with [SAM](https://huggingface.co/facebook/sam-vit-base)
+- Depth estimation with [DepthPro](https://huggingface.co/apple/DepthPro-hf) (sketched below)
+- Image classification with [DINO v2](https://huggingface.co/facebook/dinov2-base)
+- Keypoint detection with [SuperPoint](https://huggingface.co/magic-leap-community/superpoint)
+- Keypoint matching with [SuperGlue](https://huggingface.co/magic-leap-community/superglue_outdoor)
+- Object detection with [RT-DETRv2](https://huggingface.co/PekingU/rtdetr_v2_r50vd)
+- Pose estimation with [VitPose](https://huggingface.co/usyd-community/vitpose-base-simple)
+- Universal segmentation with [OneFormer](https://huggingface.co/shi-labs/oneformer_ade20k_swin_large)
+- Video classification with [VideoMAE](https://huggingface.co/MCG-NJU/videomae-large)
+
+</details>
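+Depth estimation follows the same pattern through the `depth-estimation` pipeline task; a minimal sketch, reusing the parrots sample image from earlier:
+
+```py
+from transformers import pipeline
+
+# Minimal depth-estimation sketch (checkpoint from the list above).
+depth = pipeline(task="depth-estimation", model="apple/DepthPro-hf")
+result = depth("https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png")
+# `result` holds the raw tensor and a rendered depth map,
+# e.g. result["predicted_depth"] and result["depth"] (a PIL image).
+```
+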
+ +
+<details>
+<summary>Multimodal</summary>
+
+- Audio or text to text with [Qwen2-Audio](https://huggingface.co/Qwen/Qwen2-Audio-7B)
+- Document question answering with [LayoutLMv3](https://huggingface.co/microsoft/layoutlmv3-base)
+- Image or text to text with [Qwen-VL](https://huggingface.co/Qwen/Qwen2.5-VL-3B-Instruct)
+- Image captioning with [BLIP-2](https://huggingface.co/Salesforce/blip2-opt-2.7b)
+- OCR-based document understanding with [GOT-OCR2](https://huggingface.co/stepfun-ai/GOT-OCR-2.0-hf)
+- Table question answering with [TAPAS](https://huggingface.co/google/tapas-base)
+- Unified multimodal understanding and generation with [Emu3](https://huggingface.co/BAAI/Emu3-Gen)
+- Vision to text with [Llava-OneVision](https://huggingface.co/llava-hf/llava-onevision-qwen2-0.5b-ov-hf) (sketched below)
+- Visual question answering with [Llava](https://huggingface.co/llava-hf/llava-1.5-7b-hf)
+- Visual referring expression segmentation with [Kosmos-2](https://huggingface.co/microsoft/kosmos-2-patch14-224)
+
+</details>
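+Vision-to-text models like Llava-OneVision can be driven through the `image-text-to-text` pipeline with a chat-style message. A minimal sketch; the message layout follows the chat-template convention, and the exact output indexing may vary by version:
+
+```py
+from transformers import pipeline
+
+# Minimal image-text-to-text sketch (checkpoint from the list above).
+pipe = pipeline(task="image-text-to-text", model="llava-hf/llava-onevision-qwen2-0.5b-ov-hf")
+messages = [
+    {
+        "role": "user",
+        "content": [
+            {"type": "image", "url": "https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png"},
+            {"type": "text", "text": "Describe this image in one sentence."},
+        ],
+    }
+]
+out = pipe(text=messages, max_new_tokens=40)
+print(out[0]["generated_text"][-1]["content"])
+```
+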
+ +
+<details>
+<summary>NLP</summary>
+
+- Masked word completion with [ModernBERT](https://huggingface.co/answerdotai/ModernBERT-base) (sketched below)
+- Named entity recognition with [Gemma](https://huggingface.co/google/gemma-2-2b)
+- Question answering with [Mixtral](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1)
+- Summarization with [BART](https://huggingface.co/facebook/bart-large-cnn)
+- Translation with [T5](https://huggingface.co/google-t5/t5-base)
+- Text generation with [Llama](https://huggingface.co/meta-llama/Llama-3.2-1B)
+- Text classification with [Qwen](https://huggingface.co/Qwen/Qwen2.5-0.5B)
+
+</details>
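+Masked word completion maps directly onto the `fill-mask` pipeline task; a minimal sketch, assuming ModernBERT's `[MASK]` placeholder token:
+
+```py
+from transformers import pipeline
+
+# Minimal fill-mask sketch (checkpoint from the list above).
+fill = pipeline(task="fill-mask", model="answerdotai/ModernBERT-base")
+preds = fill("Plants create [MASK] through photosynthesis.")
+# Each prediction carries a candidate token and a score,
+# e.g. preds[0]["token_str"] and preds[0]["score"].
+```
+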
+ +## Citation + +We now have a [paper](https://www.aclweb.org/anthology/2020.emnlp-demos.6/) you can cite for the 🤗 Transformers library: +```bibtex +@inproceedings{wolf-etal-2020-transformers, + title = "Transformers: State-of-the-Art Natural Language Processing", + author = "Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. Rush", + booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations", + month = oct, + year = "2020", + address = "Online", + publisher = "Association for Computational Linguistics", + url = "https://www.aclweb.org/anthology/2020.emnlp-demos.6", + pages = "38--45" +} +``` diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/RECORD b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..c297bd889952df4acf5f0852ce0efb994d5adbc3 --- /dev/null +++ b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/RECORD @@ -0,0 +1,4464 @@ +../../../bin/transformers,sha256=-AMCHgfSHog-CZM1AJyrZ2TFM47TOC8CDNFh_Vg_8q0,229 +../../../bin/transformers-cli,sha256=RaWn-UnjlCZvd8rjjHLlGNfn0MyP_NP7dAwobUeIMss,237 +transformers-4.57.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +transformers-4.57.2.dist-info/METADATA,sha256=m3p90Bg6sBOoRnkBOWYe76DXKfRkCAPejaHhvzP-SDM,43991 +transformers-4.57.2.dist-info/RECORD,, +transformers-4.57.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers-4.57.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +transformers-4.57.2.dist-info/entry_points.txt,sha256=Zra3dVQyt6Q3fU_suoD3gF81JV3WeV8gH66vzoev408,144 +transformers-4.57.2.dist-info/licenses/LICENSE,sha256=d_1HEN757DwPYiWADgI18VpCWr1KiwNVkSf814JhIEk,11418 +transformers-4.57.2.dist-info/top_level.txt,sha256=GLBaeTo_CSdhnHvbxQ0kzpEHdlLuA_33foIogaWxntI,13 +transformers/__init__.py,sha256=yb0bqmxZroYcHajMKWTUv6OablhN82Xklj9AMteMOtM,47000 +transformers/__pycache__/__init__.cpython-313.pyc,, +transformers/__pycache__/activations.cpython-313.pyc,, +transformers/__pycache__/activations_tf.cpython-313.pyc,, +transformers/__pycache__/audio_utils.cpython-313.pyc,, +transformers/__pycache__/cache_utils.cpython-313.pyc,, +transformers/__pycache__/configuration_utils.cpython-313.pyc,, +transformers/__pycache__/convert_graph_to_onnx.cpython-313.pyc,, +transformers/__pycache__/convert_slow_tokenizer.cpython-313.pyc,, +transformers/__pycache__/convert_slow_tokenizers_checkpoints_to_fast.cpython-313.pyc,, +transformers/__pycache__/convert_tf_hub_seq_to_seq_bert_to_pytorch.cpython-313.pyc,, +transformers/__pycache__/debug_utils.cpython-313.pyc,, +transformers/__pycache__/dependency_versions_check.cpython-313.pyc,, +transformers/__pycache__/dependency_versions_table.cpython-313.pyc,, +transformers/__pycache__/dynamic_module_utils.cpython-313.pyc,, +transformers/__pycache__/feature_extraction_sequence_utils.cpython-313.pyc,, +transformers/__pycache__/feature_extraction_utils.cpython-313.pyc,, +transformers/__pycache__/file_utils.cpython-313.pyc,, +transformers/__pycache__/hf_argparser.cpython-313.pyc,, 
+transformers/__pycache__/hyperparameter_search.cpython-313.pyc,, +transformers/__pycache__/image_processing_base.cpython-313.pyc,, +transformers/__pycache__/image_processing_utils.cpython-313.pyc,, +transformers/__pycache__/image_processing_utils_fast.cpython-313.pyc,, +transformers/__pycache__/image_transforms.cpython-313.pyc,, +transformers/__pycache__/image_utils.cpython-313.pyc,, +transformers/__pycache__/keras_callbacks.cpython-313.pyc,, +transformers/__pycache__/masking_utils.cpython-313.pyc,, +transformers/__pycache__/model_debugging_utils.cpython-313.pyc,, +transformers/__pycache__/modelcard.cpython-313.pyc,, +transformers/__pycache__/modeling_attn_mask_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_flash_attention_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_flax_outputs.cpython-313.pyc,, +transformers/__pycache__/modeling_flax_pytorch_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_flax_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_gguf_pytorch_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_layers.cpython-313.pyc,, +transformers/__pycache__/modeling_outputs.cpython-313.pyc,, +transformers/__pycache__/modeling_rope_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_tf_outputs.cpython-313.pyc,, +transformers/__pycache__/modeling_tf_pytorch_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_tf_utils.cpython-313.pyc,, +transformers/__pycache__/modeling_utils.cpython-313.pyc,, +transformers/__pycache__/optimization.cpython-313.pyc,, +transformers/__pycache__/optimization_tf.cpython-313.pyc,, +transformers/__pycache__/processing_utils.cpython-313.pyc,, +transformers/__pycache__/pytorch_utils.cpython-313.pyc,, +transformers/__pycache__/safetensors_conversion.cpython-313.pyc,, +transformers/__pycache__/testing_utils.cpython-313.pyc,, +transformers/__pycache__/tf_utils.cpython-313.pyc,, +transformers/__pycache__/time_series_utils.cpython-313.pyc,, +transformers/__pycache__/tokenization_mistral_common.cpython-313.pyc,, +transformers/__pycache__/tokenization_utils.cpython-313.pyc,, +transformers/__pycache__/tokenization_utils_base.cpython-313.pyc,, +transformers/__pycache__/tokenization_utils_fast.cpython-313.pyc,, +transformers/__pycache__/trainer.cpython-313.pyc,, +transformers/__pycache__/trainer_callback.cpython-313.pyc,, +transformers/__pycache__/trainer_pt_utils.cpython-313.pyc,, +transformers/__pycache__/trainer_seq2seq.cpython-313.pyc,, +transformers/__pycache__/trainer_utils.cpython-313.pyc,, +transformers/__pycache__/training_args.cpython-313.pyc,, +transformers/__pycache__/training_args_seq2seq.cpython-313.pyc,, +transformers/__pycache__/training_args_tf.cpython-313.pyc,, +transformers/__pycache__/video_processing_utils.cpython-313.pyc,, +transformers/__pycache__/video_utils.cpython-313.pyc,, +transformers/activations.py,sha256=PdWoGx5eDFNxJW8A7-wZ31IlVCAxhzfbHgNDCpjPQmQ,13109 +transformers/activations_tf.py,sha256=TGmah3loMs_pERwxpjWb5-AUeHLoBAyDxFYWVuLC7FU,4729 +transformers/audio_utils.py,sha256=wDhFAweo28mpXu2OQTdw80gU-jgFgSHKny7ujdDfqVg,54284 +transformers/cache_utils.py,sha256=9OEcrxTHW976XjADuT7w6Ll_thAzdXBa2USws0TX2oo,67772 +transformers/commands/__init__.py,sha256=aFO3I7C6G9OLA9JZSc_yMaZl0glOQtjNPjqMFfu9wfQ,923 +transformers/commands/__pycache__/__init__.cpython-313.pyc,, +transformers/commands/__pycache__/add_fast_image_processor.cpython-313.pyc,, +transformers/commands/__pycache__/add_new_model_like.cpython-313.pyc,, +transformers/commands/__pycache__/chat.cpython-313.pyc,, 
+transformers/commands/__pycache__/convert.cpython-313.pyc,, +transformers/commands/__pycache__/download.cpython-313.pyc,, +transformers/commands/__pycache__/env.cpython-313.pyc,, +transformers/commands/__pycache__/run.cpython-313.pyc,, +transformers/commands/__pycache__/serving.cpython-313.pyc,, +transformers/commands/__pycache__/train.cpython-313.pyc,, +transformers/commands/__pycache__/transformers_cli.cpython-313.pyc,, +transformers/commands/add_fast_image_processor.py,sha256=HIVXaU8NERWdsSJuyjnSp8bAnXxHojrwPCegX9IcfYU,24141 +transformers/commands/add_new_model_like.py,sha256=LPGqUn8NQ0hT8IGFpCH4Vp2JcDjLI_UsZUJ7Dq7rngM,33366 +transformers/commands/chat.py,sha256=n8rguI-62QvOKLw_UlSi-ejPoR_B1MfKG0MCzo_YqcU,32326 +transformers/commands/convert.py,sha256=IhyqKqO33anJiIwneOBCogxREJkfH7qIP_3At2xnoVE,7064 +transformers/commands/download.py,sha256=GKPadx-YGBL7dHJSEcUp-QNOP3R2L71-gPGP0z6NNQI,2395 +transformers/commands/env.py,sha256=Y9yncwQsl6QnHpoJ3iiL40hN2STocBzyN46MYyt1vVA,6668 +transformers/commands/run.py,sha256=nyEe2lOoj6e0EOxjKeF08hdW9WVWa101r9hWXl9v3Jo,4249 +transformers/commands/serving.py,sha256=Yq3PZLWdQeKNLSR7gMihl2TtRMh4Wy6sVtHLuFBH3WE,70902 +transformers/commands/train.py,sha256=SDGD_DF2-y9n2sqW2c77j5a4B9Lj8sRWHZ-VU4bnx_U,6337 +transformers/commands/transformers_cli.py,sha256=cFlXM_DHUCFgf6KnjpAcvebihZL5UKKIOlZtixopBVw,2281 +transformers/configuration_utils.py,sha256=CKOInBuKc9NAo3yT9yNG7g44XjObenKgO1wCbUlDuvc,65603 +transformers/convert_graph_to_onnx.py,sha256=g-BvJuYIq2wDmHxQ0Ng2DrpwqNshxAbQNk4zjegX4nw,20130 +transformers/convert_slow_tokenizer.py,sha256=8xMzmAR4DOVMZzISg4tUXYt8OsKY0pMjgLwWa6oi_N8,68698 +transformers/convert_slow_tokenizers_checkpoints_to_fast.py,sha256=Sa8NS-oVEYDgqYEhUfg-WuB4a8RsLReIu067twp8uCA,5061 +transformers/convert_tf_hub_seq_to_seq_bert_to_pytorch.py,sha256=02fwRNsiK3zmmL9O_hgsduomBuTDHWh8vcTyk2GOlz8,2895 +transformers/data/__init__.py,sha256=MuXSchTzRSaUtUDC1uSeDkHiSbjtrQZg4IoKeKHoH6A,1490 +transformers/data/__pycache__/__init__.cpython-313.pyc,, +transformers/data/__pycache__/data_collator.cpython-313.pyc,, +transformers/data/data_collator.py,sha256=IWfYlv3cM6KGQqGeaGzedLf0gIjyEVUw7_u_W605hPQ,105604 +transformers/data/datasets/__init__.py,sha256=PGzUJjdmTPOPMyjV4-Tj3sNrmmh-lspjyxrVbrfJoX8,909 +transformers/data/datasets/__pycache__/__init__.cpython-313.pyc,, +transformers/data/datasets/__pycache__/glue.cpython-313.pyc,, +transformers/data/datasets/__pycache__/language_modeling.cpython-313.pyc,, +transformers/data/datasets/__pycache__/squad.cpython-313.pyc,, +transformers/data/datasets/glue.py,sha256=d2ys4oU49fQJ3ZXLpGGyou54lWOY2UJMUbZcdqaBNxg,6245 +transformers/data/datasets/language_modeling.py,sha256=tNZvgig_gzJzuEjc0wGVQa86Jx2wUaQMATDCBrqN-z8,23709 +transformers/data/datasets/squad.py,sha256=O3g7HTnVqCRNYfmDG_gj5KqwEe2oi2pw5QH7jjOtMPw,9275 +transformers/data/metrics/__init__.py,sha256=o9t_VTQtqU3lEhqvocDzFMm7OvAKD-uxrjPWy0r74BI,3632 +transformers/data/metrics/__pycache__/__init__.cpython-313.pyc,, +transformers/data/metrics/__pycache__/squad_metrics.cpython-313.pyc,, +transformers/data/metrics/squad_metrics.py,sha256=__cjdPU1qt3bjnXq3k6CC2_p_5DxbU8p6I5EL40unrg,29685 +transformers/data/processors/__init__.py,sha256=lvN5mp9mdrr5v6QvZT6VcoZ78zZUvXiumTm6Gdvlgvo,1014 +transformers/data/processors/__pycache__/__init__.cpython-313.pyc,, +transformers/data/processors/__pycache__/glue.cpython-313.pyc,, +transformers/data/processors/__pycache__/squad.cpython-313.pyc,, +transformers/data/processors/__pycache__/utils.cpython-313.pyc,, 
+transformers/data/processors/__pycache__/xnli.cpython-313.pyc,, +transformers/data/processors/glue.py,sha256=IGwrYOn1sg6mztFzwfA_Eb9KyuvIYL4iYBDe5b-m83Y,23214 +transformers/data/processors/squad.py,sha256=aKeAhkB_zAZliI0n8V4rYHFPGJChND3OZ0AN9wHs2c8,33303 +transformers/data/processors/utils.py,sha256=tljqv-RDmkbfutIo2cUYbJuL75PfXMB3IP2mOM4gQJA,13823 +transformers/data/processors/xnli.py,sha256=sgcYz9YSfHY9NS0LO_YeFRRjq-nJFsDhFUP4NJeu-Q4,3481 +transformers/debug_utils.py,sha256=6m6Ks51IXFlIhEPdbXbsFi_3ZWjM34kpxV5l7-LR4bU,12891 +transformers/dependency_versions_check.py,sha256=6HbgtT2Wp-QZGOAdyUOklHvNA4rOVITGHrX34dtMOqg,2115 +transformers/dependency_versions_table.py,sha256=0rU7PttV_nc9e4Q6NYaxfHW-thBJDwCt8497mqqgWPk,3855 +transformers/distributed/__init__.py,sha256=ds-xiU6Hko8BN-XiIF2cJZPCjrQ-JFlodRARkPK8g-0,978 +transformers/distributed/__pycache__/__init__.cpython-313.pyc,, +transformers/distributed/__pycache__/configuration_utils.cpython-313.pyc,, +transformers/distributed/configuration_utils.py,sha256=rBPisXQ4szdnjxqxtFWOJYjOtnQ7JSCdbWs4_3xA1fU,4438 +transformers/dynamic_module_utils.py,sha256=TNUsqcfh-iJNfXARW50i6io__Y9DzE6pAo28h2oHJg0,36401 +transformers/feature_extraction_sequence_utils.py,sha256=U60TIDSbdFI_MD0Jxoe_aEGrwjoqEafP6eGMYKpF9jE,18273 +transformers/feature_extraction_utils.py,sha256=aGEAnlqhlmdsWzgnrb1ksDwhe8b1HxVg4Nvsvpo2Ht8,30754 +transformers/file_utils.py,sha256=iLp699qwOJKgEAtf1lLU_v26J8jAWC8q5FZ24fpdUd0,3677 +transformers/generation/__init__.py,sha256=KigBJf1VOUTzDzHAZjADHd3lTwt6c9H8vwFGsZxIAFg,12338 +transformers/generation/__pycache__/__init__.cpython-313.pyc,, +transformers/generation/__pycache__/beam_constraints.cpython-313.pyc,, +transformers/generation/__pycache__/beam_search.cpython-313.pyc,, +transformers/generation/__pycache__/candidate_generator.cpython-313.pyc,, +transformers/generation/__pycache__/configuration_utils.cpython-313.pyc,, +transformers/generation/__pycache__/flax_logits_process.cpython-313.pyc,, +transformers/generation/__pycache__/flax_utils.cpython-313.pyc,, +transformers/generation/__pycache__/logits_process.cpython-313.pyc,, +transformers/generation/__pycache__/stopping_criteria.cpython-313.pyc,, +transformers/generation/__pycache__/streamers.cpython-313.pyc,, +transformers/generation/__pycache__/tf_logits_process.cpython-313.pyc,, +transformers/generation/__pycache__/tf_utils.cpython-313.pyc,, +transformers/generation/__pycache__/utils.cpython-313.pyc,, +transformers/generation/__pycache__/watermarking.cpython-313.pyc,, +transformers/generation/beam_constraints.py,sha256=9sxTCZKDyllla5fviGdxYhoNs3LzkfWjZkLSJM4nRJE,19697 +transformers/generation/beam_search.py,sha256=kYMggx0DErcNcJyhQk5k1ZcdqeuZaQ4OxqjtdpeKZhI,48619 +transformers/generation/candidate_generator.py,sha256=mAmghyDWHLDnu5mGheb7mMSw52o_c8wM8HYD1poAlQs,63997 +transformers/generation/configuration_utils.py,sha256=2D8igfk5QCvhYzop88dg4p9dLyhCWNjtmWk7hzdEB0s,76547 +transformers/generation/continuous_batching/__init__.py,sha256=wtrHtJTWy3KgXKgS_HVTLb_fp4jc49HIJ9im4BSy9w8,904 +transformers/generation/continuous_batching/__pycache__/__init__.cpython-313.pyc,, +transformers/generation/continuous_batching/__pycache__/cache.cpython-313.pyc,, +transformers/generation/continuous_batching/__pycache__/cache_manager.cpython-313.pyc,, +transformers/generation/continuous_batching/__pycache__/continuous_api.cpython-313.pyc,, +transformers/generation/continuous_batching/__pycache__/requests.cpython-313.pyc,, +transformers/generation/continuous_batching/__pycache__/scheduler.cpython-313.pyc,, 
+transformers/generation/continuous_batching/cache.py,sha256=cfJCyUrB6lwVSolTf1HHYFeNBsqRyszkAScZgL37-uI,31301 +transformers/generation/continuous_batching/cache_manager.py,sha256=5pQSxyCFtaxVWSo2v25p4QlaTKg9ThsFwBm-jvN4Nzo,11924 +transformers/generation/continuous_batching/continuous_api.py,sha256=vXAxb04jsIoOze2MKHn--IXF8nUHuR1TOGI0tT92YRk,45598 +transformers/generation/continuous_batching/requests.py,sha256=UM7p9Gxr-C8UOYL_LH_AgIER_g9ASVFQo9UIGHSyWkA,8538 +transformers/generation/continuous_batching/scheduler.py,sha256=O1zh9JHCmXwmEjFY6_oNvt5aSOPqB-1LVH9mvbOxtJs,13833 +transformers/generation/flax_logits_process.py,sha256=d9K9Np1169WLpcXRnS_MwtWVKNAgDzKSA2fq5rom9FI,23008 +transformers/generation/flax_utils.py,sha256=09KCRqcCYW-BSbvgRDdmFSz1JL39INSnvKicKZ42T64,50642 +transformers/generation/logits_process.py,sha256=ZIFQZin_bwIRV-iT_GTE57WwanIHJ3GAnWeTh8tUpLE,152357 +transformers/generation/stopping_criteria.py,sha256=SVYo5Mh39mf7vZ7UzIO2gN6erCq8l-w5A_bpIZrz7NI,28930 +transformers/generation/streamers.py,sha256=Mj_bPFPCh4225Z_oFLc5wLegJPCUzF83ppojDdh19mA,12985 +transformers/generation/tf_logits_process.py,sha256=q9KY6Fx6pfQ15_7Lm_oAD4_Eyr3ApM4pmhjeNcNeF4M,28639 +transformers/generation/tf_utils.py,sha256=6B48u3CpCO7AEtSdhSxOjNVfqDpzcIIQqNR9NmTUCbo,175687 +transformers/generation/utils.py,sha256=ogAksegu1TYaUk0jjSGXvlQHq8kSl92YiMV-goTWP-8,210102 +transformers/generation/watermarking.py,sha256=dX_al3SIfnORmQrZMgkEzVahdI8WLpW7d4i2rdwakAM,24497 +transformers/hf_argparser.py,sha256=OHuQjhZ-v_xC7-yQ0byk4clszsAdTe4KSCgpOWyOkTY,19814 +transformers/hyperparameter_search.py,sha256=1PGHNbFHqQD8Y0FSWgDec6OxbzJWJCJe2uWDX5r4vwE,4194 +transformers/image_processing_base.py,sha256=w3Zq2w3wjKrG8nj1N4wYZCBa87ldie86zdhmyABw8k8,24947 +transformers/image_processing_utils.py,sha256=dA0f7L3ySYNThEOyTZIVtQxJS4q-1GA0a82PPD1p0kk,13787 +transformers/image_processing_utils_fast.py,sha256=A4pKqeV3Fr5GoYgi9fL5d8iludDpzD22PwMs3a_X0aw,32864 +transformers/image_transforms.py,sha256=iOVfcNSUBNhbTMo7CCa_niljDrCvuCjTf0NHwfsHmG4,41515 +transformers/image_utils.py,sha256=TuSwPrm8nZstiCwOw5ctgooPwa-WIPNKydGW3cgWxH0,37886 +transformers/integrations/__init__.py,sha256=wZwGxaqGuJQ3nXPwOudGvKy0ZxIpQ0IjGIjz86JeM64,9565 +transformers/integrations/__pycache__/__init__.cpython-313.pyc,, +transformers/integrations/__pycache__/accelerate.cpython-313.pyc,, +transformers/integrations/__pycache__/aqlm.cpython-313.pyc,, +transformers/integrations/__pycache__/awq.cpython-313.pyc,, +transformers/integrations/__pycache__/bitnet.cpython-313.pyc,, +transformers/integrations/__pycache__/bitsandbytes.cpython-313.pyc,, +transformers/integrations/__pycache__/deepspeed.cpython-313.pyc,, +transformers/integrations/__pycache__/eager_paged.cpython-313.pyc,, +transformers/integrations/__pycache__/eetq.cpython-313.pyc,, +transformers/integrations/__pycache__/executorch.cpython-313.pyc,, +transformers/integrations/__pycache__/fbgemm_fp8.cpython-313.pyc,, +transformers/integrations/__pycache__/finegrained_fp8.cpython-313.pyc,, +transformers/integrations/__pycache__/flash_attention.cpython-313.pyc,, +transformers/integrations/__pycache__/flash_paged.cpython-313.pyc,, +transformers/integrations/__pycache__/flex_attention.cpython-313.pyc,, +transformers/integrations/__pycache__/fp_quant.cpython-313.pyc,, +transformers/integrations/__pycache__/fsdp.cpython-313.pyc,, +transformers/integrations/__pycache__/ggml.cpython-313.pyc,, +transformers/integrations/__pycache__/higgs.cpython-313.pyc,, +transformers/integrations/__pycache__/hqq.cpython-313.pyc,, 
+transformers/integrations/__pycache__/hub_kernels.cpython-313.pyc,, +transformers/integrations/__pycache__/integration_utils.cpython-313.pyc,, +transformers/integrations/__pycache__/mistral.cpython-313.pyc,, +transformers/integrations/__pycache__/mxfp4.cpython-313.pyc,, +transformers/integrations/__pycache__/npu_flash_attention.cpython-313.pyc,, +transformers/integrations/__pycache__/peft.cpython-313.pyc,, +transformers/integrations/__pycache__/quanto.cpython-313.pyc,, +transformers/integrations/__pycache__/sdpa_attention.cpython-313.pyc,, +transformers/integrations/__pycache__/sdpa_paged.cpython-313.pyc,, +transformers/integrations/__pycache__/spqr.cpython-313.pyc,, +transformers/integrations/__pycache__/tensor_parallel.cpython-313.pyc,, +transformers/integrations/__pycache__/tiktoken.cpython-313.pyc,, +transformers/integrations/__pycache__/tpu.cpython-313.pyc,, +transformers/integrations/__pycache__/vptq.cpython-313.pyc,, +transformers/integrations/accelerate.py,sha256=nEQ-TMOFaXomnuKMogrzU_J4PNCZM-Bs6GPddPGbuW0,7351 +transformers/integrations/aqlm.py,sha256=T2gpCoj62L5hkyJzm6tJlP_emhJlepezKN4y1HWueVI,4535 +transformers/integrations/awq.py,sha256=gIAEOj3Tepd_eQadBbPkeMpRlHc3it4tDFzQf8XOKF4,20579 +transformers/integrations/bitnet.py,sha256=-AQ7JCa7cOcuq4tGreVgyME_k7i3D5BVUT9OYM-tg-w,15718 +transformers/integrations/bitsandbytes.py,sha256=UR7BOYCcnYDgiyrmzXzLQ5EIdTK8HxtNlDtZMNqeVHo,23901 +transformers/integrations/deepspeed.py,sha256=MFlCBFGbgjAxVhLmfwzJhNjfXB5zbZHIkuWIQjZqbq8,21947 +transformers/integrations/eager_paged.py,sha256=g7xiagZUuQ8Q2KUBx93GaOO9KEl5rtUlxP9efJB4MFk,3103 +transformers/integrations/eetq.py,sha256=wpofdy55HcvaTaOXrO_VMbmG1Rfly-kN1JfzOxw5X0U,5364 +transformers/integrations/executorch.py,sha256=fMMR8i5MIlnKf2WdV0d51SwwGUFEi80GJ5diLAo-Wc8,52326 +transformers/integrations/fbgemm_fp8.py,sha256=jwTi8hC_Y12YSKktXqUktCCtl_qO-Ncx3uvIyFcni1o,12441 +transformers/integrations/finegrained_fp8.py,sha256=AlhRxjh7bjn-0DwonNrWhiXtBQiClrrF3hHJf3dlYLw,15120 +transformers/integrations/flash_attention.py,sha256=hQqmP5RzORGIw1dFTPtqiTOTlNJvYCTMviRvAPY0QUQ,3125 +transformers/integrations/flash_paged.py,sha256=PM-_ZnoakIdt2wvHzTACkypYB8od_Vw2hemTZByj0yw,4207 +transformers/integrations/flex_attention.py,sha256=5SAf4xKtGRQvfRTPPiVrLIL0WuFOcz-i5H6Kc7vPQVg,13705 +transformers/integrations/fp_quant.py,sha256=0Sx__hSavZ03loZVojEt5deAOzCmuYNn_5aa59zSlFI,1803 +transformers/integrations/fsdp.py,sha256=Fs0aCRYb5JZqKhzWPbL8Jgy4VRbgduhhqLdAjzKCqrs,1545 +transformers/integrations/ggml.py,sha256=jM8LXEYkwteFwJRIhonrgME1lMcMB2C_plHB9B7LrOM,30350 +transformers/integrations/higgs.py,sha256=1pVz1EMhoRj-ro2ls8BY_dS2TfaJ_TrXyBg6J6LWH5Q,31542 +transformers/integrations/hqq.py,sha256=GeTogGSqPyrgTvTHzxwt5TZhpc1vRj_lb2DdWy5BKkI,5075 +transformers/integrations/hub_kernels.py,sha256=hF8z7ya-kV9i76gqrzaVvybm4WXrXkmiVCrk_GnqFUs,8290 +transformers/integrations/integration_utils.py,sha256=UwY2zNLq_kXUu8nIrbUjZgqD1y41NtHLPP8wY5LdsQM,113012 +transformers/integrations/mistral.py,sha256=9xon0VjpAnoOcHhsBhQgEiLSN7ptBkNeXBqN9mI4O3s,3970 +transformers/integrations/mxfp4.py,sha256=GvdwUiBPtmpszxRY4S6G-g2zutZR2qg6c5UaUqSAe1U,18439 +transformers/integrations/npu_flash_attention.py,sha256=5YBiW6jSW1dlYP-0dLfQJ0XFcX5zqLRUS6qRlESf5s4,4189 +transformers/integrations/peft.py,sha256=5XIr8Jd_JuNZx4VUTZKZRpyi7TQ8bRqJ6cFfLi6RVkw,28965 +transformers/integrations/quanto.py,sha256=m3tz7fCciceEe3mJc1i8GNVWcKTQ--GopPGwU4ctZ4I,4377 +transformers/integrations/sdpa_attention.py,sha256=3Fq-SamN7DuQJnOd-_LpqPPlJyspFrPC1ARyeskxoBM,5163 
+transformers/integrations/sdpa_paged.py,sha256=h58QJ-6ftXQbw0XCFy2L8-KZpJww0eynvJQMkzvJ-KA,2193 +transformers/integrations/spqr.py,sha256=nHTdlyfkCc5vJO60TMZuE9pUiTTPfaqYV7kVLF6PMd0,5525 +transformers/integrations/tensor_parallel.py,sha256=c6Y8fE4XC9wvsE9Km_Z9TdnBogBf4KAqVDnSBEUHv2o,49481 +transformers/integrations/tiktoken.py,sha256=2s3O3_3dsA7pbsz1Lu_eLA2SrloMZWVpg0NklRxPMlY,1627 +transformers/integrations/tpu.py,sha256=JtzQLGX0mnci_xKVxoXPDqrAT_YLSCaw2WK-4IssCu4,1394 +transformers/integrations/vptq.py,sha256=he6YX2nhHu0kBzA9-F6QU-WEjEMi0B7v__zEJTn66X8,4540 +transformers/keras_callbacks.py,sha256=M9ZvQaJ52k79RlgM7WNootiXoD_hNirmiMNaPxI3tik,20611 +transformers/kernels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/kernels/__pycache__/__init__.cpython-313.pyc,, +transformers/kernels/deta/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cu,sha256=M5-bW9g5z-upTFMNPIfnyLAqKTxGMCjAPqBr0GmWHX8,7360 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cuh,sha256=hygB20Vh3RttOSdCuTFz8V0d3CXNp-Q89x22rYmD258,61433 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.h,sha256=rPWOOMo3QyFdB5kMiexpApLFZ4dnRtx4CluEAGwsfO8,1139 +transformers/kernels/deta/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deta/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deta/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/falcon_mamba/__init__.py,sha256=bt0j851F1uuH7flSsTvIqdh9zdKVTOVKWt3datb15SI,721 +transformers/kernels/falcon_mamba/__pycache__/__init__.cpython-313.pyc,, +transformers/kernels/falcon_mamba/__pycache__/selective_scan_with_ln_interface.cpython-313.pyc,, +transformers/kernels/falcon_mamba/selective_scan_with_ln_interface.py,sha256=649oJD0sox1I-TCkZuRMjYm3tWQkQ3VoPXLNeOcN_ss,19731 +transformers/kernels/mra/cuda_kernel.cu,sha256=LxxRYTymSoBEQpWXHA0PMzwZwpolcwX7mFAjwU8-ZMc,11678 +transformers/kernels/mra/cuda_kernel.h,sha256=UJvYq_MDzhcp07bZpYcOBn8ZGFcf_Ax1dynuiVTBvmA,1682 +transformers/kernels/mra/cuda_launch.cu,sha256=Ox5MTACriC30CGyn-g1Kb5EgQSMAZSaN6fpit3xLFWc,4072 +transformers/kernels/mra/cuda_launch.h,sha256=RVCkN_euasvgPK0zADNRvRYGWd4ah5l9X-7UG_AcdH8,707 +transformers/kernels/mra/torch_extension.cpp,sha256=N0YdBLVX0lZabckJzV_RYTHS2atCNvn13E4Ivobt25g,1405 +transformers/kernels/rwkv/wkv_cuda.cu,sha256=EvaUrEnw_qr2EjMKP-Pq7VPzFfGlMJnFhdHNLtn1fPU,6219 +transformers/kernels/rwkv/wkv_cuda_bf16.cu,sha256=DG9hTtOAlrnpDFahjt-MmnOxjMuhGU55GPsmV21HtrQ,6633 +transformers/kernels/rwkv/wkv_op.cpp,sha256=qSExhKdT6p3hyaTv5SypCnH_c7EmaX6HbhTcCntvZWg,4022 +transformers/kernels/yoso/common.h,sha256=Tq2rOUtE8Y4DRAUrRISvwIwVI3u8JBf21WgWSAYiDlQ,273 +transformers/kernels/yoso/common_cuda.h,sha256=Sji70AuVcuZSotLF7Gotmun9MJuOHo8wEkxizKXLRtc,258 +transformers/kernels/yoso/common_cuda_device.h,sha256=y6WUgAiapnMKqthRMS5s-DMSWNVkar_i8g4KPFvqiuk,2063 +transformers/kernels/yoso/fast_lsh_cumulation.cu,sha256=LA4LGNgyXT3osIyQtFBcRanSyNQWm8yqmpz7AeLP7cw,19061 +transformers/kernels/yoso/fast_lsh_cumulation.h,sha256=1cTWZjOm751HGiEB5P-UPJ8SE1VO7XRyXmBgyxYDyjI,1575 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.cu,sha256=ryU4sksYZhucllzxUT_gBYlNcKbMr00QggD4S4-LmLY,32875 
+transformers/kernels/yoso/fast_lsh_cumulation_cuda.h,sha256=_KGI8HQbVFtCN5KAcSGpyiJ2foGi26RKen138CUc2fY,5490 +transformers/kernels/yoso/fast_lsh_cumulation_torch.cpp,sha256=-Rh7o39Z3rtOPwNnEM-c51TCqywpVdK0WVaA7VRrXbQ,3154 +transformers/loss/__init__.py,sha256=qETsqCwayu6Ymj_J4_A_eiwiaMRHQ0noWKM35naanzc,606 +transformers/loss/__pycache__/__init__.cpython-313.pyc,, +transformers/loss/__pycache__/loss_d_fine.cpython-313.pyc,, +transformers/loss/__pycache__/loss_deformable_detr.cpython-313.pyc,, +transformers/loss/__pycache__/loss_for_object_detection.cpython-313.pyc,, +transformers/loss/__pycache__/loss_grounding_dino.cpython-313.pyc,, +transformers/loss/__pycache__/loss_rt_detr.cpython-313.pyc,, +transformers/loss/__pycache__/loss_utils.cpython-313.pyc,, +transformers/loss/loss_d_fine.py,sha256=pyVihlU1CQraOzUjFLrPXIsVSHxHhCun2SIzvOZFEDs,15881 +transformers/loss/loss_deformable_detr.py,sha256=pUwwrAVxEwa2qamyoTIqlxpll_rBTXCOn67bW73ZKuc,7321 +transformers/loss/loss_for_object_detection.py,sha256=fZuLWKzaCtGvCmlpevpHnIGp4BFIPJIIU4GcFqDO7r0,24581 +transformers/loss/loss_grounding_dino.py,sha256=Efh5GmRzZHjK3ZoNCNCRhU1GV9pcBtHDKxFbrJwr3K0,11190 +transformers/loss/loss_rt_detr.py,sha256=rGk8fFh1qoPgsRL0-vHw3FrjL3wRNV81-XQTFrElTeM,22130 +transformers/loss/loss_utils.py,sha256=-mXxizFn9FmJ16i6hkMqQdMiM4-XicyJsI1dfyDxELc,7046 +transformers/masking_utils.py,sha256=epY_7tgXO4JlKY3SQ1DBZiIs5rRxlEL85Iq0FNBHimo,60515 +transformers/model_debugging_utils.py,sha256=KTIsIVjUcBl1U_VfwLSXUvWEwX1HAJtERjdO93hEfoQ,17090 +transformers/modelcard.py,sha256=CExU8KMQ06EWzVz5APvfYo2WZcvkQXsh5KD53gRTf-o,35828 +transformers/modeling_attn_mask_utils.py,sha256=oIEM72sNYJO_2qNJs63eLmM_06a-UHPYC14IsvEsNUs,21517 +transformers/modeling_flash_attention_utils.py,sha256=KT_oHGvTiqyKO8auCG_yG5aVs0npfJZQsf5h5Co21xA,30112 +transformers/modeling_flax_outputs.py,sha256=1RQh6VTIIVgh2OME-EkUdJc2NdBi5TEXBHCFCupFASs,42188 +transformers/modeling_flax_pytorch_utils.py,sha256=DF0Bi7EoOoH_XTypcePLg8MqLGCvqBgAwtzy_d-Xl_U,21555 +transformers/modeling_flax_utils.py,sha256=TbaDBlsD6A4nh9xkeBW1H7bxYcxsQXA4ZesXY2ReJSc,61240 +transformers/modeling_gguf_pytorch_utils.py,sha256=WFBWZtKwM5G1XD97hsUJX_SX0NGeDAIk8vj-6tW8j4g,22318 +transformers/modeling_layers.py,sha256=vSYvNkrya5k4HbI1g8puGpdB2MOJS8OqCPIE_mp_aO4,11566 +transformers/modeling_outputs.py,sha256=Ltzkmwp_n61TtSOVvs5fsg49D4PDBfOGY1TRnXkKXRQ,109647 +transformers/modeling_rope_utils.py,sha256=VcwMjLdvWSqxeLRmKty99K1QErsLQknAqWYDFB8Y6aw,43192 +transformers/modeling_tf_outputs.py,sha256=6THINWzeA-DZyxAdvE3lu3p3h4ZpeBl8sjrOhj9wb9Y,56248 +transformers/modeling_tf_pytorch_utils.py,sha256=Q7-5aDeI2ec3-NYgH6Fp9OpAMmymqyp1mDZGWYve_Io,27982 +transformers/modeling_tf_utils.py,sha256=Rqc7BVljDxocTRTXu2IuiUtpMAFijjtxXx46dZIDsbE,166141 +transformers/modeling_utils.py,sha256=vxxrKkPPfDb7efN8mBQk3Wrnjrhj_KpdKjfnbJgoYR0,308964 +transformers/models/__init__.py,sha256=0qgVdBF2S0qHhJQDVKZDcTHVh09JH8Zb3G2vZXxV_GI,11385 +transformers/models/__pycache__/__init__.cpython-313.pyc,, +transformers/models/aimv2/__init__.py,sha256=cDli19QT_YABtn4DPLYfoWHtkmOQYGipAgPKGuRje4c,991 +transformers/models/aimv2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/aimv2/__pycache__/configuration_aimv2.cpython-313.pyc,, +transformers/models/aimv2/__pycache__/modeling_aimv2.cpython-313.pyc,, +transformers/models/aimv2/__pycache__/modular_aimv2.cpython-313.pyc,, +transformers/models/aimv2/configuration_aimv2.py,sha256=K0yaVDpIlBtlYi8xDfhpU7ndTASqTu8nV_rCoWAAAyM,13740 
+transformers/models/aimv2/modeling_aimv2.py,sha256=uuhyjxaxsGH7H8bS8Zas5o6SLPB5tA723CZGxDQ4Oyk,29017 +transformers/models/aimv2/modular_aimv2.py,sha256=fRGrWR58k611s1Uo7kbp6SsOpbKLIgaRNoUmAeAVsC8,28190 +transformers/models/albert/__init__.py,sha256=WjQ4NtNxKNj7Hvk9lA2OXmdgD_SNFp1wLS2eeL3-WoE,1154 +transformers/models/albert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/albert/__pycache__/configuration_albert.cpython-313.pyc,, +transformers/models/albert/__pycache__/modeling_albert.cpython-313.pyc,, +transformers/models/albert/__pycache__/modeling_flax_albert.cpython-313.pyc,, +transformers/models/albert/__pycache__/modeling_tf_albert.cpython-313.pyc,, +transformers/models/albert/__pycache__/tokenization_albert.cpython-313.pyc,, +transformers/models/albert/__pycache__/tokenization_albert_fast.cpython-313.pyc,, +transformers/models/albert/configuration_albert.py,sha256=1jz6sQm_Ki_o_EHJj7mzULanRt3xFoPv3tt_rQg6Ct4,8162 +transformers/models/albert/modeling_albert.py,sha256=WfQVwAz1_INOKBMAnjE5K0qIA__DUV6PHpkki2J7E-8,57681 +transformers/models/albert/modeling_flax_albert.py,sha256=-QrqU89tM44jANMS-haQlSARp2uxVuUXCpUnlE8lT58,41035 +transformers/models/albert/modeling_tf_albert.py,sha256=nfIkDUf5d0ORzk1vG-3F-PWTyLSt-7uIUsBpdR9r_wg,68993 +transformers/models/albert/tokenization_albert.py,sha256=kV5S_i-EPu2mZ8f1tr7T-IRsd_W_eshGVGO_veSgQi8,13391 +transformers/models/albert/tokenization_albert_fast.py,sha256=m4Xl3Pb038gBQzlGPAvWFf2G3LrEDdPWhASLBrLucz8,7609 +transformers/models/align/__init__.py,sha256=QqTKk-Z4BylY6EkBSlYvKXVhT2te-m2Al626OUAz-r4,1027 +transformers/models/align/__pycache__/__init__.cpython-313.pyc,, +transformers/models/align/__pycache__/configuration_align.cpython-313.pyc,, +transformers/models/align/__pycache__/modeling_align.cpython-313.pyc,, +transformers/models/align/__pycache__/processing_align.cpython-313.pyc,, +transformers/models/align/configuration_align.py,sha256=T3yiaW_5C9VNHouwd-FGLgwFJd8M8h-a9gBIod0Ys2c,15999 +transformers/models/align/modeling_align.py,sha256=9jX2CuQVURXOkZk0c61kO4CP2Umw437JFFbbpKQwXvE,50953 +transformers/models/align/processing_align.py,sha256=oKA2Nsn_NM1Lp5VNuvQNYWQU0Yfzw0KrO2QewAz8cmQ,2526 +transformers/models/altclip/__init__.py,sha256=405IijUCYr1EGvOqg1xzds_GHOlxCl0HCsf1rI0wtPY,1033 +transformers/models/altclip/__pycache__/__init__.cpython-313.pyc,, +transformers/models/altclip/__pycache__/configuration_altclip.cpython-313.pyc,, +transformers/models/altclip/__pycache__/modeling_altclip.cpython-313.pyc,, +transformers/models/altclip/__pycache__/processing_altclip.cpython-313.pyc,, +transformers/models/altclip/configuration_altclip.py,sha256=rxz2vZSDIUBaflQRk31pKKYgfx0vrNu-ZJ2KuJtkz38,18486 +transformers/models/altclip/modeling_altclip.py,sha256=420Q8nX80hx6_O1DNJF86-BGFezU76DDwmiI5FUxuz8,59827 +transformers/models/altclip/processing_altclip.py,sha256=5-gEYEM1fGpG-4bwj1NViJ2c5XNhcbXO-2pELR-7gSQ,1902 +transformers/models/apertus/__init__.py,sha256=zY0UTl5pr-BSjXXoYMrQRQm0vhGnSykqKJ2n7mxl_ig,1271 +transformers/models/apertus/__pycache__/__init__.cpython-313.pyc,, +transformers/models/apertus/__pycache__/configuration_apertus.cpython-313.pyc,, +transformers/models/apertus/__pycache__/modeling_apertus.cpython-313.pyc,, +transformers/models/apertus/__pycache__/modular_apertus.cpython-313.pyc,, +transformers/models/apertus/configuration_apertus.py,sha256=1Hcuelq3r20FYICxtEqyEBut6FlO8BMUdGa203DFfhs,11922 +transformers/models/apertus/modeling_apertus.py,sha256=3mE31jl-Mzxx6qOg1je1Hg-A8qVdyU169jSXnI1O3l0,21171 
+transformers/models/apertus/modular_apertus.py,sha256=PfZfJ4CY3UXq7JxxPyYYDNERoyOi_Eu1YFNZWWVTcy0,16833 +transformers/models/arcee/__init__.py,sha256=bysIumYEa1Z1bCLBaaP_SCT_6poh8zFLgxt_4Ib-Diw,1009 +transformers/models/arcee/__pycache__/__init__.cpython-313.pyc,, +transformers/models/arcee/__pycache__/configuration_arcee.cpython-313.pyc,, +transformers/models/arcee/__pycache__/modeling_arcee.cpython-313.pyc,, +transformers/models/arcee/__pycache__/modular_arcee.cpython-313.pyc,, +transformers/models/arcee/configuration_arcee.py,sha256=S4OeiCsHV_Qbow6q6X-ZPwg5UFz7jvpK6r06Gf3KCZw,10764 +transformers/models/arcee/modeling_arcee.py,sha256=F1VlYsQvR6xfKcc8yHXuU6kUgGjdWMaMQ36C6nZbG0Q,21245 +transformers/models/arcee/modular_arcee.py,sha256=pcUF5I9TfPIPjrJpWJKf_odVorR_pLwgKhvSqLMwWpA,10133 +transformers/models/aria/__init__.py,sha256=I3vYPjV-sDl0OAILLADGZ7hUkk9ZsmyZ8CEf9tie_dY,1066 +transformers/models/aria/__pycache__/__init__.cpython-313.pyc,, +transformers/models/aria/__pycache__/configuration_aria.cpython-313.pyc,, +transformers/models/aria/__pycache__/image_processing_aria.cpython-313.pyc,, +transformers/models/aria/__pycache__/modeling_aria.cpython-313.pyc,, +transformers/models/aria/__pycache__/modular_aria.cpython-313.pyc,, +transformers/models/aria/__pycache__/processing_aria.cpython-313.pyc,, +transformers/models/aria/configuration_aria.py,sha256=OfA722tTQm4EEsYxmZM-Wm12cnoH8sQ702bHQSBSBEw,16425 +transformers/models/aria/image_processing_aria.py,sha256=Th8DM67DRmZKNRmZ-c0AeVK4Z-Zm0xkNxX1LX99Ht08,24833 +transformers/models/aria/modeling_aria.py,sha256=tJAG2IzfRlpdKOJusCK28hWNukCovIVo9u9-E5WF_GM,52245 +transformers/models/aria/modular_aria.py,sha256=hqTwYC2YEZdDiiTr6_ghUhSmkTpHWiqz05iG4wmZr3w,70858 +transformers/models/aria/processing_aria.py,sha256=mb4Si17gdG4NyCxmfWS0WTRkvp6nDF9D3JoKvm6ky5c,9291 +transformers/models/audio_spectrogram_transformer/__init__.py,sha256=a_YVwB1p4_PPeqPFWqFsGSGSQVTaSUXY0xsOd_Gflqs,1107 +transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/configuration_audio_spectrogram_transformer.cpython-313.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/feature_extraction_audio_spectrogram_transformer.cpython-313.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/modeling_audio_spectrogram_transformer.cpython-313.pyc,, +transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py,sha256=HAhLugn_E6Ajr3-3n3qohG5ifAPqNfSuucQ0B2S7tCM,5901 +transformers/models/audio_spectrogram_transformer/feature_extraction_audio_spectrogram_transformer.py,sha256=vPBynKfjgNCc2c6T3kd5AKxsstjKBI5Z-oNwtRAH7VY,9929 +transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py,sha256=FLoYNBuazzxtpuRfJnFN-Oks-M81D2ANNuPN3l1ZljU,20590 +transformers/models/auto/__init__.py,sha256=wX3m7QJXMmkNMTL6ef7HH18vXdZ0cgUIkHgpVLpGZ_4,1292 +transformers/models/auto/__pycache__/__init__.cpython-313.pyc,, +transformers/models/auto/__pycache__/auto_factory.cpython-313.pyc,, +transformers/models/auto/__pycache__/configuration_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/feature_extraction_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/image_processing_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/modeling_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/modeling_flax_auto.cpython-313.pyc,, 
+transformers/models/auto/__pycache__/modeling_tf_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/processing_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/tokenization_auto.cpython-313.pyc,, +transformers/models/auto/__pycache__/video_processing_auto.cpython-313.pyc,, +transformers/models/auto/auto_factory.py,sha256=BMC2EJfFM0Wez3Tui51zCr1P67v6Sp9zhB-3_0nI7uI,47212 +transformers/models/auto/configuration_auto.py,sha256=EgtizU4T3RVXYqzRTPZPyLyhegaPXdAE3f0w-8su9Ks,55606 +transformers/models/auto/feature_extraction_auto.py,sha256=P8qIocXJZJN4aTv7rwC61uKk-62EN1v1KDA5jSS_k0o,20502 +transformers/models/auto/image_processing_auto.py,sha256=NOBKovx-W6s_bpGNjhRHYK9et8nguRBkHgm77PIowns,39079 +transformers/models/auto/modeling_auto.py,sha256=bk-mfIjgKouE1G17FxnnYPGXBzt7IzvPMO61lvWl8Ho,98580 +transformers/models/auto/modeling_flax_auto.py,sha256=jljyZ4H_wWjcxuVbLUDtO0acB104wm78aXyVNeGu_Zk,15709 +transformers/models/auto/modeling_tf_auto.py,sha256=YWaGWUmrGNg5eieun1OTG_EmtzWy8CU_Ebt9gw6mxyw,30313 +transformers/models/auto/processing_auto.py,sha256=6XWA3hic1-XqM3ipbgEnmTnzBcaKAkWpFguAyxqTiWE,20853 +transformers/models/auto/tokenization_auto.py,sha256=UmXm3vWoPBs3hx-kb_KllEDvgLN3wkm8DCE1NTFX9zA,58505 +transformers/models/auto/video_processing_auto.py,sha256=6-1v3ujwY9xgp4qJPUdmS3LpBC6J5Hz4uJbu64T-7Mo,19346 +transformers/models/autoformer/__init__.py,sha256=EzGIA8hECx9XytdzTifaGyGp7hrXqlyP0slqAq8xBNY,1001 +transformers/models/autoformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/autoformer/__pycache__/configuration_autoformer.cpython-313.pyc,, +transformers/models/autoformer/__pycache__/modeling_autoformer.cpython-313.pyc,, +transformers/models/autoformer/configuration_autoformer.py,sha256=hSn6Waq6CuyDFOxAecr9IhFaq4fEAEHn0uiaC_tsa3s,12192 +transformers/models/autoformer/modeling_autoformer.py,sha256=r2w2SBBn-9-HwAfIh9_PBPCqb9tZqPGOb4zUbu5Pulc,106139 +transformers/models/aya_vision/__init__.py,sha256=-DIHmMjkXOyNGbMtZJkHtLiOzdxOYSrKq4_mmR09cfk,1042 +transformers/models/aya_vision/__pycache__/__init__.cpython-313.pyc,, +transformers/models/aya_vision/__pycache__/configuration_aya_vision.cpython-313.pyc,, +transformers/models/aya_vision/__pycache__/modeling_aya_vision.cpython-313.pyc,, +transformers/models/aya_vision/__pycache__/modular_aya_vision.cpython-313.pyc,, +transformers/models/aya_vision/__pycache__/processing_aya_vision.cpython-313.pyc,, +transformers/models/aya_vision/configuration_aya_vision.py,sha256=oW_qdNAfXS5UnxCVQRtGM8ALI5XzBGRHa3Frx-ECr9s,4790 +transformers/models/aya_vision/modeling_aya_vision.py,sha256=8gOIPuAfXgNYOZWb030nDtX2HyvFeM0rIu7oIFZ6uzY,22642 +transformers/models/aya_vision/modular_aya_vision.py,sha256=9l4GsadTDYSumQA_NnClQJNm9sm_kz-7m-WlKRxuZJk,12867 +transformers/models/aya_vision/processing_aya_vision.py,sha256=voI-Z4FtSKIlEGyQGqkZu3L4aOh22bTX_V9OUVAucWc,11961 +transformers/models/bamba/__init__.py,sha256=gtebRUrAdiwq-rJmlM5qpbtbGEg-xxA3pjivOHJvaRs,1040 +transformers/models/bamba/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bamba/__pycache__/configuration_bamba.cpython-313.pyc,, +transformers/models/bamba/__pycache__/modeling_bamba.cpython-313.pyc,, +transformers/models/bamba/__pycache__/modular_bamba.cpython-313.pyc,, +transformers/models/bamba/configuration_bamba.py,sha256=zo-wvX5wz8wWepdJLwQnm2yyZdpHx0lMsE85Quf8RYE,10134 +transformers/models/bamba/modeling_bamba.py,sha256=v_kOAgtiGNEpIplxDNd_uvTxoaV4IIMInMDFre5aNDw,69530 
+transformers/models/bamba/modular_bamba.py,sha256=fOqRuWzszr3vJn0_ivWevFfreCn3ci1gM3gIY4ankMI,55398 +transformers/models/bark/__init__.py,sha256=fIlOQ6RPBARVhUKdjNx2Nvf09azEI6AiPv3lyWjk0Gc,1024 +transformers/models/bark/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bark/__pycache__/configuration_bark.cpython-313.pyc,, +transformers/models/bark/__pycache__/generation_configuration_bark.cpython-313.pyc,, +transformers/models/bark/__pycache__/modeling_bark.cpython-313.pyc,, +transformers/models/bark/__pycache__/processing_bark.cpython-313.pyc,, +transformers/models/bark/configuration_bark.py,sha256=p9Upfi8NXf4bHj9d2C75qseupR7X7F3JYWFATkVFh7c,11907 +transformers/models/bark/generation_configuration_bark.py,sha256=cI5vwf3ll9YIBKiXpb7HKZwu1-wDrhnlktpYy8i9X94,14955 +transformers/models/bark/modeling_bark.py,sha256=gnaJnZP1Pg5pVCAYAy-sAsn7gIGErCf4jd5qzyM-G1o,72124 +transformers/models/bark/processing_bark.py,sha256=z9rHKILwkJtiCPWY5LTOeJv8F8Zc73T2RGsNqmzguU8,15669 +transformers/models/bart/__init__.py,sha256=1_kCOlvj4hcCbNiAsAhH0PYAK4zopuVKAYKZ_64O3_c,1142 +transformers/models/bart/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bart/__pycache__/configuration_bart.cpython-313.pyc,, +transformers/models/bart/__pycache__/modeling_bart.cpython-313.pyc,, +transformers/models/bart/__pycache__/modeling_flax_bart.cpython-313.pyc,, +transformers/models/bart/__pycache__/modeling_tf_bart.cpython-313.pyc,, +transformers/models/bart/__pycache__/tokenization_bart.cpython-313.pyc,, +transformers/models/bart/__pycache__/tokenization_bart_fast.cpython-313.pyc,, +transformers/models/bart/configuration_bart.py,sha256=0BemB9DKkzjpDV-39iIC96OkQHY9sevzmYUmWdG5fHg,18871 +transformers/models/bart/modeling_bart.py,sha256=6C5SWNKoH73NG6-c5mSSw7wr0LloBramAzTIGbazaA8,89905 +transformers/models/bart/modeling_flax_bart.py,sha256=T8dSDHTYEZBOhc5MAgmtCal_ns_hFCOn98b9dPS3Tho,83070 +transformers/models/bart/modeling_tf_bart.py,sha256=Kb_NgfqI4LdvaLE4Ji6vzTqc9P7zdY7ijjr0osDd0lQ,80645 +transformers/models/bart/tokenization_bart.py,sha256=kSDfbiku7CuiLkGRu2WN4rvk4Ub-fIyM7h1tw8W4Ids,16265 +transformers/models/bart/tokenization_bart_fast.py,sha256=KT0ISbLlAUn8i77zti_Oe3yebxVSM65gXGMFS3PaeU8,11275 +transformers/models/barthez/__init__.py,sha256=21WBGVafx-0kV-K_2jBdpBg0NBWsRKJqJowo03g2S9A,1003 +transformers/models/barthez/__pycache__/__init__.cpython-313.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez.cpython-313.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez_fast.cpython-313.pyc,, +transformers/models/barthez/tokenization_barthez.py,sha256=_uwi3euB_QpCr9lakhVXRWMK0G-RYTD5WyLhbI4qF6Y,12160 +transformers/models/barthez/tokenization_barthez_fast.py,sha256=7gExI5ls2M0YNtk7Dp5AHW1PAggk5zMzz2tKtQ3x54s,7721 +transformers/models/bartpho/__init__.py,sha256=DN0zgU4dM841Kqqo6wN8FpWFeWYHCBxIq3lxrg5vUoU,958 +transformers/models/bartpho/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bartpho/__pycache__/tokenization_bartpho.cpython-313.pyc,, +transformers/models/bartpho/tokenization_bartpho.py,sha256=fpW_x46y9RWaXd3i1aWRWZN-hAeXnph8DzzLwPWwf10,13619 +transformers/models/beit/__init__.py,sha256=t99cV1TicuPrQlZaHjwkrEi5d7tMQeK7TTooGJIn6-Q,1157 +transformers/models/beit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/beit/__pycache__/configuration_beit.cpython-313.pyc,, +transformers/models/beit/__pycache__/feature_extraction_beit.cpython-313.pyc,, +transformers/models/beit/__pycache__/image_processing_beit.cpython-313.pyc,, 
+transformers/models/beit/__pycache__/image_processing_beit_fast.cpython-313.pyc,, +transformers/models/beit/__pycache__/modeling_beit.cpython-313.pyc,, +transformers/models/beit/__pycache__/modeling_flax_beit.cpython-313.pyc,, +transformers/models/beit/configuration_beit.py,sha256=zT9actpT-E-p_5LLb6aDvYM8xClvu2pddJWK6wlIfgU,11602 +transformers/models/beit/feature_extraction_beit.py,sha256=I3Hxy2MRCaAr0m4taNn5y8_9_fAXCNpcYZi6gQa5tXY,1284 +transformers/models/beit/image_processing_beit.py,sha256=UW3XGGN7f6NOzQUT2NVskNu0yUPa8b2LBOQ56wzdEAI,24129 +transformers/models/beit/image_processing_beit_fast.py,sha256=NosO8FiZzozJSESNYCfr2woT32YBMGHpOa2CN-c_PE4,8967 +transformers/models/beit/modeling_beit.py,sha256=6cSIUZbuZQlEc02WLF6DeVGa3eTvpdcK-eTcW8tOEXs,64444 +transformers/models/beit/modeling_flax_beit.py,sha256=g6QwQOBdYd5kheWIzaO7Xpok4MFPJWwtopojVj5jLfU,37136 +transformers/models/bert/__init__.py,sha256=8IqoRT5cO4DU3GmQHsJgW-n6MclOZTmho5VYkKDMbnU,1182 +transformers/models/bert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bert/__pycache__/configuration_bert.cpython-313.pyc,, +transformers/models/bert/__pycache__/modeling_bert.cpython-313.pyc,, +transformers/models/bert/__pycache__/modeling_flax_bert.cpython-313.pyc,, +transformers/models/bert/__pycache__/modeling_tf_bert.cpython-313.pyc,, +transformers/models/bert/__pycache__/tokenization_bert.cpython-313.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_fast.cpython-313.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_tf.cpython-313.pyc,, +transformers/models/bert/configuration_bert.py,sha256=dv6OswIVpNUrWtI7WmM3XaAA8C8ZB-S3Lzs5Jl9LkVk,7314 +transformers/models/bert/modeling_bert.py,sha256=ieXlCYuYogbIg1xsALUrQNbQeKWZTEDzAPmBc5aRN8I,78330 +transformers/models/bert/modeling_flax_bert.py,sha256=xhjDVfsHDHsdFNHwjNRwjHq9wQW5usH-iKpINjBQ7SQ,64027 +transformers/models/bert/modeling_tf_bert.py,sha256=e7HT05UokKQ8fhpOwvksEcSGY0HDsoBAc7Nzb64xOik,94415 +transformers/models/bert/tokenization_bert.py,sha256=Ffkso5F6UuKyFQ_4Ao4op0k9o1Df90qeA9IJpYj2t98,19766 +transformers/models/bert/tokenization_bert_fast.py,sha256=QE60mWbUbQf8D96L5evCqqrN0hRbKz6LKXhg2Hf8T_A,6557 +transformers/models/bert/tokenization_bert_tf.py,sha256=jmvu68QDk-uMMGM3cHF_1n4PtAMf-PLmgk3xtMBzC90,12060 +transformers/models/bert_generation/__init__.py,sha256=sLEyyFf2yI6QflP1lTI9LXUF5PvWBvu-fsaFbjund5I,1059 +transformers/models/bert_generation/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bert_generation/__pycache__/configuration_bert_generation.cpython-313.pyc,, +transformers/models/bert_generation/__pycache__/modeling_bert_generation.cpython-313.pyc,, +transformers/models/bert_generation/__pycache__/tokenization_bert_generation.cpython-313.pyc,, +transformers/models/bert_generation/configuration_bert_generation.py,sha256=KHse7kMgoXgcldz0LMonkb6mmNoVRbQ2U07Q3_p6_fI,6393 +transformers/models/bert_generation/modeling_bert_generation.py,sha256=tK49HE4BS_PeHhgnA4BTj6wcET3kQV07t9-2B2pKXHE,39877 +transformers/models/bert_generation/tokenization_bert_generation.py,sha256=yu2PgCmCenfHfutzdLJioGoI2_8r8dbqBL7WmJ6JTZs,7179 +transformers/models/bert_japanese/__init__.py,sha256=94xfgVPnIQuHQxvmc55_EedJlJQTnHiL4va6Ry6x3LE,964 +transformers/models/bert_japanese/__pycache__/__init__.cpython-313.pyc,, +transformers/models/bert_japanese/__pycache__/tokenization_bert_japanese.cpython-313.pyc,, +transformers/models/bert_japanese/tokenization_bert_japanese.py,sha256=-ehwXShgMWynj6owRC5JCJJOusGVpUFCejcuvbVxPrU,37815 
+transformers/models/bertweet/__init__.py,sha256=EZegs0rWTTCiOC_eY-M8eV7bCcwU60dB0HsM1S1VDzQ,959
+transformers/models/bertweet/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bertweet/__pycache__/tokenization_bertweet.cpython-313.pyc,,
+transformers/models/bertweet/tokenization_bertweet.py,sha256=9WQZwFonhHf2CvY-r7Xa4TG3x_eLY4BZrNHvmA1hdKg,27007
+transformers/models/big_bird/__init__.py,sha256=3rloOuQNKURURWgk5Td4OBQBAzBdTJ2_fM_CI6yPrV0,1126
+transformers/models/big_bird/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/big_bird/__pycache__/configuration_big_bird.cpython-313.pyc,,
+transformers/models/big_bird/__pycache__/modeling_big_bird.cpython-313.pyc,,
+transformers/models/big_bird/__pycache__/modeling_flax_big_bird.cpython-313.pyc,,
+transformers/models/big_bird/__pycache__/tokenization_big_bird.cpython-313.pyc,,
+transformers/models/big_bird/__pycache__/tokenization_big_bird_fast.cpython-313.pyc,,
+transformers/models/big_bird/configuration_big_bird.py,sha256=qb_lyze6oqg-PSGaAOSUQkHyeg8ApLGP0c76lZ-aFMI,7892
+transformers/models/big_bird/modeling_big_bird.py,sha256=yCfNqleJY-N5gHA9SpO3KcyLXUNQPB6fsX_Zy51yb98,130121
+transformers/models/big_bird/modeling_flax_big_bird.py,sha256=ZNo_0dB2U0PWW2sX_sbg3PBC7DpQv5RMfdpES-ZzTUM,109894
+transformers/models/big_bird/tokenization_big_bird.py,sha256=h0RLGmqUBycIt1lo1gMDd3DNVP4dEz__sF5E4XY23yw,13249
+transformers/models/big_bird/tokenization_big_bird_fast.py,sha256=EmJBuqogH76ZeE9ZjX3DIAloL-MyT-Bl7PpmozcntfA,8946
+transformers/models/bigbird_pegasus/__init__.py,sha256=7zOl1EhO8W2S9jE0FsyEoW8kV6yn5bLA0dspGFM1mLQ,1011
+transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bigbird_pegasus/__pycache__/configuration_bigbird_pegasus.cpython-313.pyc,,
+transformers/models/bigbird_pegasus/__pycache__/modeling_bigbird_pegasus.cpython-313.pyc,,
+transformers/models/bigbird_pegasus/configuration_bigbird_pegasus.py,sha256=KCIddfmLPMNb3LIrJY9xSCMasUjRHP6WB-jnedBOeNI,19323
+transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py,sha256=TZrKUlEZPJ3qMfu9JbHiyFGAOliSFiOu1UZcb5yRcLw,142103
+transformers/models/biogpt/__init__.py,sha256=pZxVjmVzt7FXlkMO_5fMg01eyPvvHYXmDA33MKhp6Yk,1032
+transformers/models/biogpt/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/biogpt/__pycache__/configuration_biogpt.cpython-313.pyc,,
+transformers/models/biogpt/__pycache__/modeling_biogpt.cpython-313.pyc,,
+transformers/models/biogpt/__pycache__/modular_biogpt.cpython-313.pyc,,
+transformers/models/biogpt/__pycache__/tokenization_biogpt.cpython-313.pyc,,
+transformers/models/biogpt/configuration_biogpt.py,sha256=t544SePN3AO-BjY3nzMW4CH-lMAdxs1TombK682z4fI,6215
+transformers/models/biogpt/modeling_biogpt.py,sha256=6xfgWMFljwl7KoaseFyrVGTXkUZEqsJnIFuA4Gq5KUs,42387
+transformers/models/biogpt/modular_biogpt.py,sha256=M2u1Ela5E761p1QfsSK4b3vz0tHwG25fKu71idZyWs4,33693
+transformers/models/biogpt/tokenization_biogpt.py,sha256=8Lh_IRa00R-tyz6kAF4zTr48Yl6AcCfvG54ysueGsVU,12157
+transformers/models/bit/__init__.py,sha256=I9z2RYsPRokD1ycMBRLaesbyMKK4MwLPM5oTles2KmQ,1072
+transformers/models/bit/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bit/__pycache__/configuration_bit.cpython-313.pyc,,
+transformers/models/bit/__pycache__/image_processing_bit.cpython-313.pyc,,
+transformers/models/bit/__pycache__/image_processing_bit_fast.cpython-313.pyc,,
+transformers/models/bit/__pycache__/modeling_bit.cpython-313.pyc,,
+transformers/models/bit/configuration_bit.py,sha256=wZFP76CJYV7Hn-M4aSRmXn-SxXAsmUUHOLNHP6By6lI,6295
+transformers/models/bit/image_processing_bit.py,sha256=Q3X6zFqJse0xqu_HQYH1FqB9nNKS58fiZ5j_F-coqW0,15932
+transformers/models/bit/image_processing_bit_fast.py,sha256=JY4UL4OH2nQ8S66PyIYQaLFFjfhc7rIPaA-hCgmAo6Y,1327
+transformers/models/bit/modeling_bit.py,sha256=j9KE1ZGt5_ZHxt901CuIbzmqJxCv8NVQ--oM3yPGg_E,28529
+transformers/models/bitnet/__init__.py,sha256=0u3B40Xd6dJ7J7TBxJzSQWcyUe2ZWJTbT6iaWVod_-A,1018
+transformers/models/bitnet/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bitnet/__pycache__/configuration_bitnet.cpython-313.pyc,,
+transformers/models/bitnet/__pycache__/modeling_bitnet.cpython-313.pyc,,
+transformers/models/bitnet/__pycache__/modular_bitnet.cpython-313.pyc,,
+transformers/models/bitnet/configuration_bitnet.py,sha256=H5hN7Qc_TWBcMr3xmF04oRrg36gd_IIs_X-wUMTs47k,6652
+transformers/models/bitnet/modeling_bitnet.py,sha256=Uh4HNeW_VjQ6BlJEPVlvy92Toba-6WT84wLho9fwLtw,21445
+transformers/models/bitnet/modular_bitnet.py,sha256=5VncHTdQ1vaLC6KxEjUPT0q9eZhc5IFomIzzMaF0gLg,5857
+transformers/models/blenderbot/__init__.py,sha256=kdNRND4x54J18VhDVLH6usun5IblSN_9NYaLZfvaysc,1178
+transformers/models/blenderbot/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/blenderbot/__pycache__/configuration_blenderbot.cpython-313.pyc,,
+transformers/models/blenderbot/__pycache__/modeling_blenderbot.cpython-313.pyc,,
+transformers/models/blenderbot/__pycache__/modeling_flax_blenderbot.cpython-313.pyc,,
+transformers/models/blenderbot/__pycache__/modeling_tf_blenderbot.cpython-313.pyc,,
+transformers/models/blenderbot/__pycache__/tokenization_blenderbot.cpython-313.pyc,,
+transformers/models/blenderbot/__pycache__/tokenization_blenderbot_fast.cpython-313.pyc,,
+transformers/models/blenderbot/configuration_blenderbot.py,sha256=NRqxofcj8VdtIozBg7xniPOiyFy1fb3MX4STphPVB8A,18881
+transformers/models/blenderbot/modeling_blenderbot.py,sha256=rR3lMa9PPPfVYp4iD0i96yfDNs6XFHLXRdZhBHz2iY8,73596
+transformers/models/blenderbot/modeling_flax_blenderbot.py,sha256=ETs29jtKfDjSBL-y7VJcdnHPHu_OGsvr4U5vmeVtRo8,65181
+transformers/models/blenderbot/modeling_tf_blenderbot.py,sha256=zzZt4rlKifDEhE7H-EsSDbavzBRCZl2XJE6mQgOe7-4,72662
+transformers/models/blenderbot/tokenization_blenderbot.py,sha256=sfqSGJEl-owb9-MHxG97G4BQdB6rSv9d17y7xlbLJCw,18223
+transformers/models/blenderbot/tokenization_blenderbot_fast.py,sha256=Bn6KZaUr2-LwmvHQmOMo6TfqwLxbm_-AXarQw4iP9bQ,12448
+transformers/models/blenderbot_small/__init__.py,sha256=QsmmBSPdTC43EIyYBwo-xTyJjLLVqm4Cx-KFJ9O2mfE,1214
+transformers/models/blenderbot_small/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/blenderbot_small/__pycache__/configuration_blenderbot_small.cpython-313.pyc,,
+transformers/models/blenderbot_small/__pycache__/modeling_blenderbot_small.cpython-313.pyc,,
+transformers/models/blenderbot_small/__pycache__/modeling_flax_blenderbot_small.cpython-313.pyc,,
+transformers/models/blenderbot_small/__pycache__/modeling_tf_blenderbot_small.cpython-313.pyc,,
+transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small.cpython-313.pyc,,
+transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small_fast.cpython-313.pyc,,
+transformers/models/blenderbot_small/configuration_blenderbot_small.py,sha256=mok0lacLLkSC3LlQ6C6UxDBHZRBWW1pEUGyjAbamVco,18323
+transformers/models/blenderbot_small/modeling_blenderbot_small.py,sha256=eLWgMtmCxKgg-fwRZj6p6UIG0HY3fQ271NIDRejVFL4,71842
+transformers/models/blenderbot_small/modeling_flax_blenderbot_small.py,sha256=4gn7JguFgvff3qE1Yspggo998Ai1ycs6CwJFXJv9zok,66171
+transformers/models/blenderbot_small/modeling_tf_blenderbot_small.py,sha256=9yF-h3X3_ZH-YC4R_WTi4VuQRV8vupHwP6AaNrrs3B4,71604
+transformers/models/blenderbot_small/tokenization_blenderbot_small.py,sha256=ygaUGnIJLrPKz1jOJY9tgL97t9-OUMFegcNJXm9wnRU,7945
+transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py,sha256=v28mfjk2dPGKH3YLRhol45ZsYFqXTfTCXlPtk4MK9-A,3361
+transformers/models/blip/__init__.py,sha256=aWgKd8B53KWjNBpR7xREMajO43tIo4sRcEjZUGMt8TI,1226
+transformers/models/blip/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/blip/__pycache__/configuration_blip.cpython-313.pyc,,
+transformers/models/blip/__pycache__/image_processing_blip.cpython-313.pyc,,
+transformers/models/blip/__pycache__/image_processing_blip_fast.cpython-313.pyc,,
+transformers/models/blip/__pycache__/modeling_blip.cpython-313.pyc,,
+transformers/models/blip/__pycache__/modeling_blip_text.cpython-313.pyc,,
+transformers/models/blip/__pycache__/modeling_tf_blip.cpython-313.pyc,,
+transformers/models/blip/__pycache__/modeling_tf_blip_text.cpython-313.pyc,,
+transformers/models/blip/__pycache__/processing_blip.cpython-313.pyc,,
+transformers/models/blip/configuration_blip.py,sha256=_i_EKea4_h_16az8_o0jL9-Os0nXbP6iNwQaqrlnz44,14430
+transformers/models/blip/image_processing_blip.py,sha256=-fUxQF6H5f7uyqG-Lfn_xmmvCYIv-RL_Ip4PXbJAqXY,15330
+transformers/models/blip/image_processing_blip_fast.py,sha256=0gEkLRg06PJYQk6gpm15N3nQsPZtstosy_kxMkY8CS8,1312
+transformers/models/blip/modeling_blip.py,sha256=zh9EUjmpmOq0tVgJJt9xCrcjunyzD_P1gYoRtT8O0uk,51405
+transformers/models/blip/modeling_blip_text.py,sha256=bxBV_kL5igbuII8wFAcA_8hob7LT32nDpZ0RcBwiEEY,45452
+transformers/models/blip/modeling_tf_blip.py,sha256=M1B1RZxL9JDZIvhM05QMI0VL6Y3tKqwZdvT9FJM58xE,71427
+transformers/models/blip/modeling_tf_blip_text.py,sha256=asEmINloo3IGhWYpwzPsj7IxjMlOlxe5cDGO3yZpHXU,49960
+transformers/models/blip/processing_blip.py,sha256=JH1-wa01uCvuRSDmCJxBvPNZclZLlPKd0vEmZicjiPc,5377
+transformers/models/blip_2/__init__.py,sha256=kj_6H0rQ7dLoQk-COIb06LlDRnbORu3GLU3m4EdMkAM,1030
+transformers/models/blip_2/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/blip_2/__pycache__/configuration_blip_2.cpython-313.pyc,,
+transformers/models/blip_2/__pycache__/modeling_blip_2.cpython-313.pyc,,
+transformers/models/blip_2/__pycache__/processing_blip_2.cpython-313.pyc,,
+transformers/models/blip_2/configuration_blip_2.py,sha256=gIYt9Djj-DlOx0pGWTfQxRocguyX4xFajk_CoKNjL2E,16247
+transformers/models/blip_2/modeling_blip_2.py,sha256=gOFcjlttcPqoZRcYj4FPeLZOtBj9Zq9Q0LHWc4wsumc,94120
+transformers/models/blip_2/processing_blip_2.py,sha256=J1d2x4IDU7FwOkbAII3hKujAFBLVhR6R8kuQLh6bDY8,7097
+transformers/models/bloom/__init__.py,sha256=lcq09Py2vSezUf26aaBG4yp2DpLZ-mAPt-fybvY_C-Q,1073
+transformers/models/bloom/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bloom/__pycache__/configuration_bloom.cpython-313.pyc,,
+transformers/models/bloom/__pycache__/modeling_bloom.cpython-313.pyc,,
+transformers/models/bloom/__pycache__/modeling_flax_bloom.cpython-313.pyc,,
+transformers/models/bloom/__pycache__/tokenization_bloom_fast.cpython-313.pyc,,
+transformers/models/bloom/configuration_bloom.py,sha256=O9X_juvNPqEuxlwJQN-jPJqj8d0SROTHERYlSCEf_C4,10216
+transformers/models/bloom/modeling_bloom.py,sha256=Avn4PCiY_L_XH5BMqPMFkMK5k7MiTVWCtWo-zWMv6fw,56073
+transformers/models/bloom/modeling_flax_bloom.py,sha256=JRj-42JcNa40WjWRnQ6Q-aAsZVxd1zuNN59lf1MGinM,30197
+transformers/models/bloom/tokenization_bloom_fast.py,sha256=ViQsNhssK_0gFtEHraJXLRJyHVgjAbBp0tmRbIsviVg,6277
+transformers/models/blt/__init__.py,sha256=Eg5lWtgdEQ8Yld6WH0R7-x3R6hnVQJFqUMEuAooZOyo,1023
+transformers/models/blt/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/blt/__pycache__/configuration_blt.cpython-313.pyc,,
+transformers/models/blt/__pycache__/modeling_blt.cpython-313.pyc,,
+transformers/models/blt/__pycache__/modular_blt.cpython-313.pyc,,
+transformers/models/blt/configuration_blt.py,sha256=1BB49KBpUmKqOheBkZCRhKp_pxAUyTx70HRzxM9f1n4,18512
+transformers/models/blt/modeling_blt.py,sha256=X8DGG-Gsxo4sFVyNwDdBU4FSLGT8JpPh399xDYo6Zeo,57670
+transformers/models/blt/modular_blt.py,sha256=XMhPXScy6aWeVjvSgW-UTKqQ3q0NBbVsUtMX7DkqxXU,41128
+transformers/models/bridgetower/__init__.py,sha256=S9u22GAHi1LVcS3OYGBzfBVTjDvk_WU9JZGPTEo6zxw,1146
+transformers/models/bridgetower/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bridgetower/__pycache__/configuration_bridgetower.cpython-313.pyc,,
+transformers/models/bridgetower/__pycache__/image_processing_bridgetower.cpython-313.pyc,,
+transformers/models/bridgetower/__pycache__/image_processing_bridgetower_fast.cpython-313.pyc,,
+transformers/models/bridgetower/__pycache__/modeling_bridgetower.cpython-313.pyc,,
+transformers/models/bridgetower/__pycache__/processing_bridgetower.cpython-313.pyc,,
+transformers/models/bridgetower/configuration_bridgetower.py,sha256=EtrVtgjiemvmV-CjJnnl98BQGepG77YRYUrK1lMsORU,14416
+transformers/models/bridgetower/image_processing_bridgetower.py,sha256=ZnzyoXhyBxaCVHGXYhNxbjIRHNZGTcSFVjygDwTDfOM,26382
+transformers/models/bridgetower/image_processing_bridgetower_fast.py,sha256=jyj-S-apeUXEblGb4X81ukT1cp_fB4XnNFGv2_eGdG4,10243
+transformers/models/bridgetower/modeling_bridgetower.py,sha256=FY6HCoJUA-cBGZNyyDTsF5M9A8IN0lcLpp0-2O38xvE,85207
+transformers/models/bridgetower/processing_bridgetower.py,sha256=7tHuo9JYsmlcwydROiw7d2iDzPlAND4J_QRjHovc41Q,2612
+transformers/models/bros/__init__.py,sha256=wT0avJ_J50-WK6jOB-6UbgN5kjHiBwG-NNT_iefMXr8,1024
+transformers/models/bros/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/bros/__pycache__/configuration_bros.cpython-313.pyc,,
+transformers/models/bros/__pycache__/modeling_bros.cpython-313.pyc,,
+transformers/models/bros/__pycache__/processing_bros.cpython-313.pyc,,
+transformers/models/bros/configuration_bros.py,sha256=9Vgmvk3hZ-VccsOGhB8OlUPjM5ojPufSIBHa2oY4I5I,6418
+transformers/models/bros/modeling_bros.py,sha256=zooxl7vaPiy4pRutRhosGdWBtJIgaxsXK39Fd6ighYI,49220
+transformers/models/bros/processing_bros.py,sha256=_ICaTNW2U3KvBf4aL-k6yspJmQ3n4Kgvxn6HK_MXcNg,1936
+transformers/models/byt5/__init__.py,sha256=O7yXvHyqMZ7stkKX67knnddmJ81pPHoKrY_7NCAauU4,955
+transformers/models/byt5/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/byt5/__pycache__/tokenization_byt5.cpython-313.pyc,,
+transformers/models/byt5/tokenization_byt5.py,sha256=ALgzHke0kQEe_3bopDv8r2TXfkaq2tQAM61DmzmQ8MU,10046
+transformers/models/camembert/__init__.py,sha256=hfxYgJYchvXLwio03yWsATGmrU2hgKOoiw7gaNoVgj8,1129
+transformers/models/camembert/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/camembert/__pycache__/configuration_camembert.cpython-313.pyc,,
+transformers/models/camembert/__pycache__/modeling_camembert.cpython-313.pyc,,
+transformers/models/camembert/__pycache__/modeling_tf_camembert.cpython-313.pyc,,
+transformers/models/camembert/__pycache__/tokenization_camembert.cpython-313.pyc,,
+transformers/models/camembert/__pycache__/tokenization_camembert_fast.cpython-313.pyc,,
+transformers/models/camembert/configuration_camembert.py,sha256=kf91zHJLL5_C_1OnJwPthKDj_636CNffChcfRs-epqg,7429
+transformers/models/camembert/modeling_camembert.py,sha256=1KxNKOUu94ka8r6o05rLb-lZ8h75e4dnA9jAaUdxSk4,72486
+transformers/models/camembert/modeling_tf_camembert.py,sha256=onjWfIs2bL12BpsnRUkjmBlGUCbDJ_2zsFYd89q6uEQ,81596
+transformers/models/camembert/tokenization_camembert.py,sha256=OaoBvDD4XRNLdCgRGCJCFnjznbVNt_ApfKkGj76WxHM,14075
+transformers/models/camembert/tokenization_camembert_fast.py,sha256=dDsFP_EvbYbt3kDWcj5BXEf-mbYQ-i4VNordbhOk5_E,8159
+transformers/models/canine/__init__.py,sha256=ThkEqO6wPzWCnAplx0EWCUqVaKKsNYQKXQhWfTblEBU,1032
+transformers/models/canine/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/canine/__pycache__/configuration_canine.cpython-313.pyc,,
+transformers/models/canine/__pycache__/modeling_canine.cpython-313.pyc,,
+transformers/models/canine/__pycache__/tokenization_canine.cpython-313.pyc,,
+transformers/models/canine/configuration_canine.py,sha256=8Rlt-y-lkY4Jwzi4Aa7NXN4TJtDoQylbogUOjt_q9IA,6584
+transformers/models/canine/modeling_canine.py,sha256=zjvzvge-hXp5EEBU4VSZ9pxo4-pVNG4eTSxbKlXes20,68532
+transformers/models/canine/tokenization_canine.py,sha256=hgz1yAdqqYWg6LkijfpzgataobP19GiOqwcUhQJZvtI,8194
+transformers/models/chameleon/__init__.py,sha256=EJ5kOvTyCFQjjWv9h4CMGHDRjQZ3CJ6j9Ygk2bEylA0,1136
+transformers/models/chameleon/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/chameleon/__pycache__/configuration_chameleon.cpython-313.pyc,,
+transformers/models/chameleon/__pycache__/image_processing_chameleon.cpython-313.pyc,,
+transformers/models/chameleon/__pycache__/image_processing_chameleon_fast.cpython-313.pyc,,
+transformers/models/chameleon/__pycache__/modeling_chameleon.cpython-313.pyc,,
+transformers/models/chameleon/__pycache__/processing_chameleon.cpython-313.pyc,,
+transformers/models/chameleon/configuration_chameleon.py,sha256=hZwZ8pOjKFZa-urI7HFe-Nsk_4rWDKCDxDP-o3NjzvE,13413
+transformers/models/chameleon/image_processing_chameleon.py,sha256=4OTeuwTs678xCTSGsvI72ul7e1jGcUgHdDaAURJKnoU,16937
+transformers/models/chameleon/image_processing_chameleon_fast.py,sha256=4AHafFApd7TobFhfi1mk6xZv8sQD-HTxaxRA6mMzJhM,4042
+transformers/models/chameleon/modeling_chameleon.py,sha256=J9_qPn8mYJYdcGs9Zh2BSJX4eTImaqkZ_u7iOJTxTcM,51113
+transformers/models/chameleon/processing_chameleon.py,sha256=vkwvAURVFBK3if6FCEC5Bx7i9rdF4A_hvQDXJ-djIJw,9170
+transformers/models/chinese_clip/__init__.py,sha256=-koN80ZGdGEDnTkLDGSDlzQ3fZcahTtaOgjtl3sddSE,1202
+transformers/models/chinese_clip/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/chinese_clip/__pycache__/configuration_chinese_clip.cpython-313.pyc,,
+transformers/models/chinese_clip/__pycache__/feature_extraction_chinese_clip.cpython-313.pyc,,
+transformers/models/chinese_clip/__pycache__/image_processing_chinese_clip.cpython-313.pyc,,
+transformers/models/chinese_clip/__pycache__/image_processing_chinese_clip_fast.cpython-313.pyc,,
+transformers/models/chinese_clip/__pycache__/modeling_chinese_clip.cpython-313.pyc,,
+transformers/models/chinese_clip/__pycache__/processing_chinese_clip.cpython-313.pyc,,
+transformers/models/chinese_clip/configuration_chinese_clip.py,sha256=4VEBKh2M7m9X6htGKwwiord7klW7QLjR8FS3GoVvfDw,20298
+transformers/models/chinese_clip/feature_extraction_chinese_clip.py,sha256=hZDBWu4SqNaqbxgA6EE-WZd4Qs8tmqPgXQjveRB5bnU,1366
+transformers/models/chinese_clip/image_processing_chinese_clip.py,sha256=CCcoYIcookC5PoPjlUN5kyNl09GSJt6im6yh8g_V7js,15568
+transformers/models/chinese_clip/image_processing_chinese_clip_fast.py,sha256=XkNJybi8fcwuB7_uN33KN61wDRJcNvXSuY7yUKKSr2k,1347
+transformers/models/chinese_clip/modeling_chinese_clip.py,sha256=bc0e6Ncfvkhcymyg0Ny-Pj5Hq4B7b_myREyZO67J_Fg,52243
+transformers/models/chinese_clip/processing_chinese_clip.py,sha256=CgmF_XZu29YZJcYPMoTjsYBFtVzui424BEJ4seTTFV8,2580
+transformers/models/clap/__init__.py,sha256=751udHbsD7FBLGAByjx_8Z4XPLly1MaQQ4wKN_9vbOY,1067
+transformers/models/clap/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/clap/__pycache__/configuration_clap.cpython-313.pyc,,
+transformers/models/clap/__pycache__/feature_extraction_clap.cpython-313.pyc,,
+transformers/models/clap/__pycache__/modeling_clap.cpython-313.pyc,,
+transformers/models/clap/__pycache__/processing_clap.cpython-313.pyc,,
+transformers/models/clap/configuration_clap.py,sha256=jTnfuJQv3FHs0rP7nfJPMRk4bz6MtsjHXdS1ZL_um-I,18343
+transformers/models/clap/feature_extraction_clap.py,sha256=yQu0PMzMMCfIaP5ZOP3VoNGCHY7auqbZLOLnZ1kW1D8,18836
+transformers/models/clap/modeling_clap.py,sha256=ZpcPwAUCd-hIwAv7YlCfrrZkBFmfxSyhzJv5DvIul1U,82585
+transformers/models/clap/processing_clap.py,sha256=kFR0NoO-uyn6Fy1frJHr6WzQWpcOKr7IXA4rpVKPGh8,2904
+transformers/models/clip/__init__.py,sha256=bkfM4LH7u_ab8C6cctpvdgySHyQmUaSlWphG4CkcQtg,1307
+transformers/models/clip/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/clip/__pycache__/configuration_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/feature_extraction_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/image_processing_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/image_processing_clip_fast.cpython-313.pyc,,
+transformers/models/clip/__pycache__/modeling_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/modeling_flax_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/modeling_tf_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/processing_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/tokenization_clip.cpython-313.pyc,,
+transformers/models/clip/__pycache__/tokenization_clip_fast.cpython-313.pyc,,
+transformers/models/clip/configuration_clip.py,sha256=HNLMByhpe6KnDBrOzMUcyFUYwKMIZXKdMVhrkVG52vs,18888
+transformers/models/clip/feature_extraction_clip.py,sha256=45gMszIrxGAwWmVEjEOF7GmpoWAkUnG9YQnb60wT_7I,1284
+transformers/models/clip/image_processing_clip.py,sha256=0sYNbCguBzHBUXHUQjZcbQbKaIXzUjx5HSk5DW80bso,17029
+transformers/models/clip/image_processing_clip_fast.py,sha256=19Xm-DXHVL7IU8xmCwJJEuiiIMDFwWw0uBQFaVAi4So,1407
+transformers/models/clip/modeling_clip.py,sha256=PneamYJ2GNbm1XWD7E9cDXZTId-lBUpP_Kala7AE1Jk,50315
+transformers/models/clip/modeling_flax_clip.py,sha256=LrZPnOAh57jEZwhEMXzX1hsPf_RliNPYtjsnMYk_TOg,50791
+transformers/models/clip/modeling_tf_clip.py,sha256=NP2F259L4n_ZQ8ZwRlhgAVVZAk3v8AhbyCuXLs2Dd8s,60318
+transformers/models/clip/processing_clip.py,sha256=IfG-k1WbxMIWeKp2ALOZalXC5GVFvv8eBgodVCrmaYM,2606
+transformers/models/clip/tokenization_clip.py,sha256=-WrPr-t8Kr-HXnL3tdXbj_FFladaDPLMNk78I7ROK5Y,20554
+transformers/models/clip/tokenization_clip_fast.py,sha256=Z9_w8hpW0NwYpF-HZPP6-gLuKwrq_rcHYcliP5EU3VM,6766
+transformers/models/clipseg/__init__.py,sha256=12Y-b3sRDKM3Hy8-6rK4GUF2a91V1S3nLUF7559AALw,1033
+transformers/models/clipseg/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/clipseg/__pycache__/configuration_clipseg.cpython-313.pyc,,
+transformers/models/clipseg/__pycache__/modeling_clipseg.cpython-313.pyc,,
+transformers/models/clipseg/__pycache__/processing_clipseg.cpython-313.pyc,,
+transformers/models/clipseg/configuration_clipseg.py,sha256=QZ_ztGrVTi66G9yZNkLPlBiSzqbXgF1IBh_w-57H5Hk,18843
+transformers/models/clipseg/modeling_clipseg.py,sha256=zNnNRCWFEMSbuDlARKREFBVAI5sWizu-AMtlKvRsGm4,57753
+transformers/models/clipseg/processing_clipseg.py,sha256=D93N18jf-fY0eca3gODLMTZOWEkTpWTRYBlf2IsMPFw,7033
+transformers/models/clvp/__init__.py,sha256=RRnPofxkr_llgSxCP9tcAhu3xCR7E_m1PkrHv7KLMzo,1104
+transformers/models/clvp/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/clvp/__pycache__/configuration_clvp.cpython-313.pyc,,
+transformers/models/clvp/__pycache__/feature_extraction_clvp.cpython-313.pyc,,
+transformers/models/clvp/__pycache__/modeling_clvp.cpython-313.pyc,,
+transformers/models/clvp/__pycache__/number_normalizer.cpython-313.pyc,,
+transformers/models/clvp/__pycache__/processing_clvp.cpython-313.pyc,,
+transformers/models/clvp/__pycache__/tokenization_clvp.cpython-313.pyc,,
+transformers/models/clvp/configuration_clvp.py,sha256=-coBzCPB5o6HbwdQ-b0JWxMERxkAWPFM4mTI5fazcQs,20257
+transformers/models/clvp/feature_extraction_clvp.py,sha256=hkNsHttIXyTgcApA4nFAPMN6cVpSkRPKue1e8lDdYiA,10997
+transformers/models/clvp/modeling_clvp.py,sha256=EioYV6nROj2IpPWNopwtel_xus-u49ncztVNBaSdXxs,86704
+transformers/models/clvp/number_normalizer.py,sha256=0zNI1TWJCMJ4i9VxrirCDeX_wjEV02G0_Ig8xdmD-LY,8933
+transformers/models/clvp/processing_clvp.py,sha256=3ObNqP0yjmgXTqQLsggFloJ_LS3rxHd1mRkcbtscMxE,2310
+transformers/models/clvp/tokenization_clvp.py,sha256=G5O6ykpxfuJLaej8mgkjty3UNEnTL47e5RfvVVJ7P8Q,14808
+transformers/models/code_llama/__init__.py,sha256=aZJA9qTifG-RGtJKMzfspfxuQkaBryVva7Ah_uGNMoM,1009
+transformers/models/code_llama/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/code_llama/__pycache__/tokenization_code_llama.cpython-313.pyc,,
+transformers/models/code_llama/__pycache__/tokenization_code_llama_fast.cpython-313.pyc,,
+transformers/models/code_llama/tokenization_code_llama.py,sha256=BLEp72WT3fN8px65zzBn9bcwv30ThoDflE2267TJ8Xk,19314
+transformers/models/code_llama/tokenization_code_llama_fast.py,sha256=1ca69b8iic_Q61Fy9frmt6maw8DnQxxSBhBexILkltw,15832
+transformers/models/codegen/__init__.py,sha256=NeUIbS8szfu5R9-7CX_G6730RHOODzTfmrapJH2ApMk,1080
+transformers/models/codegen/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/codegen/__pycache__/configuration_codegen.cpython-313.pyc,,
+transformers/models/codegen/__pycache__/modeling_codegen.cpython-313.pyc,,
+transformers/models/codegen/__pycache__/tokenization_codegen.cpython-313.pyc,,
+transformers/models/codegen/__pycache__/tokenization_codegen_fast.cpython-313.pyc,,
+transformers/models/codegen/configuration_codegen.py,sha256=c0KsyyBNKsPFMf2TL_5lOS_DztoS-cOphRasNOI2e5I,9574
+transformers/models/codegen/modeling_codegen.py,sha256=nvJ4MbCPQYvzzpQyo7Jj8zaR85cdSjxp8PYJRvV6PuA,29290
+transformers/models/codegen/tokenization_codegen.py,sha256=ih2YU6A9RW2oPI6dyXsxx8WUHeKcibwmtyWOu3vbuHY,15365
+transformers/models/codegen/tokenization_codegen_fast.py,sha256=wiHxam7coCyQR5L1LAgcYCBgwCUUhaLZwyF2lr7WK7w,9650
+transformers/models/cohere/__init__.py,sha256=1Tg-6WGc5wgGduSR__N-jGZvPje9kNs92DW78vN0Auo,1037
+transformers/models/cohere/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/cohere/__pycache__/configuration_cohere.cpython-313.pyc,,
+transformers/models/cohere/__pycache__/modeling_cohere.cpython-313.pyc,,
+transformers/models/cohere/__pycache__/modular_cohere.cpython-313.pyc,,
+transformers/models/cohere/__pycache__/tokenization_cohere_fast.cpython-313.pyc,,
+transformers/models/cohere/configuration_cohere.py,sha256=cuNOpFFmlh-aqWbY7M771gr1Efo4ScohzJ7AXN1cXAs,11162
+transformers/models/cohere/modeling_cohere.py,sha256=pTI8cb83lrxxK53wWfbY569hlQ7aiCkfO9s9ZVtI9wI,24353
+transformers/models/cohere/modular_cohere.py,sha256=5tyCfCQa3dIATUA7-Vizwgqkhhp_L410b8KBZgrJt50,16189
+transformers/models/cohere/tokenization_cohere_fast.py,sha256=XJdkistG_oFiEQv-5Eyq5Sw2hiTrob-7EnCb3cwFmuU,28790
+transformers/models/cohere2/__init__.py,sha256=6Cx_c-uTSNopbO3NLWCgMmEB2-5hzkrunUWmMrb8YSU,1011
+transformers/models/cohere2/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/cohere2/__pycache__/configuration_cohere2.cpython-313.pyc,,
+transformers/models/cohere2/__pycache__/modeling_cohere2.cpython-313.pyc,,
+transformers/models/cohere2/__pycache__/modular_cohere2.cpython-313.pyc,,
+transformers/models/cohere2/configuration_cohere2.py,sha256=UZRlLA6reKjnY30TWxFAdVCoS7UfHAAT2EMc5bbCsNs,12648
+transformers/models/cohere2/modeling_cohere2.py,sha256=RxdyQCFjzqvKrcrbn6TGLD_uVFw2Pf-3Cz-eCvOfIrw,23695
+transformers/models/cohere2/modular_cohere2.py,sha256=z6NFZWrQ4FeQNkxe0TrFjElmLrIxlIS-CzoLv1f7uW0,20589
+transformers/models/cohere2_vision/__init__.py,sha256=VZPnI0ugmeUt8tHNLXhSF1X4maFa0T4pqnm3VAPEyo0,1110
+transformers/models/cohere2_vision/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/cohere2_vision/__pycache__/configuration_cohere2_vision.cpython-313.pyc,,
+transformers/models/cohere2_vision/__pycache__/image_processing_cohere2_vision_fast.cpython-313.pyc,,
+transformers/models/cohere2_vision/__pycache__/modeling_cohere2_vision.cpython-313.pyc,,
+transformers/models/cohere2_vision/__pycache__/modular_cohere2_vision.cpython-313.pyc,,
+transformers/models/cohere2_vision/__pycache__/processing_cohere2_vision.cpython-313.pyc,,
+transformers/models/cohere2_vision/configuration_cohere2_vision.py,sha256=-TEQe4n_754S1-kA224CiSpwDccDWeqAgBRQiN6zZZ8,3516
+transformers/models/cohere2_vision/image_processing_cohere2_vision_fast.py,sha256=4YW3e9YyVNhGeEBvZcOIxF-7bN96GDm5pXRdRwr9Ev4,13715
+transformers/models/cohere2_vision/modeling_cohere2_vision.py,sha256=wbWEJptc_V1PS-3VYRa1Ls1EJ3F1OU8Tk_Tzw627Mcw,18299
+transformers/models/cohere2_vision/modular_cohere2_vision.py,sha256=1vlwhIUXnBSYy1n-MyNhsZqxXS0McwnJt---5sZmvl8,12991
+transformers/models/cohere2_vision/processing_cohere2_vision.py,sha256=V12UAVRARxMMXrp9uQC16mkdYxGp_LanFeCoEc_Pzrw,9865
+transformers/models/colpali/__init__.py,sha256=eG-nOojo-DPkgZJACn6hbJqqfnGE97uKmLkpWVin66A,1033
+transformers/models/colpali/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/colpali/__pycache__/configuration_colpali.cpython-313.pyc,,
+transformers/models/colpali/__pycache__/modeling_colpali.cpython-313.pyc,,
+transformers/models/colpali/__pycache__/modular_colpali.cpython-313.pyc,,
+transformers/models/colpali/__pycache__/processing_colpali.cpython-313.pyc,,
+transformers/models/colpali/configuration_colpali.py,sha256=ejhl_-iGVK5Sk7jw2e0p_0JA7RfYUVlt63R_eOZ6LNg,4300
+transformers/models/colpali/modeling_colpali.py,sha256=7fV_cF70INbO3hlI_eCyL1Snh0vGuyuJie5VAU0O03Q,8579
+transformers/models/colpali/modular_colpali.py,sha256=3zgydZWrbU4YUxcPdlY_it1YLYGCSmk9xdgY2HdFHuk,15881
+transformers/models/colpali/processing_colpali.py,sha256=JmOoHe5JAdwXSSvV4yi6C5qCII-0getna25Orxil6Zo,19360
+transformers/models/colqwen2/__init__.py,sha256=GBrOYGkXcXTOuCd6AhVMss6TVb2igEKFcrAkgJbbg-Q,1036
+transformers/models/colqwen2/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/colqwen2/__pycache__/configuration_colqwen2.cpython-313.pyc,,
+transformers/models/colqwen2/__pycache__/modeling_colqwen2.cpython-313.pyc,,
+transformers/models/colqwen2/__pycache__/modular_colqwen2.cpython-313.pyc,,
+transformers/models/colqwen2/__pycache__/processing_colqwen2.cpython-313.pyc,,
+transformers/models/colqwen2/configuration_colqwen2.py,sha256=nPtBtiR57sOIGsOuzTR694zFDWU1nFWvweT_Gz68Sgg,3664
+transformers/models/colqwen2/modeling_colqwen2.py,sha256=M2mVEf9CX9T12UcpTRmpJDsp3ppgBa2diMqPgaFArA8,10925
+transformers/models/colqwen2/modular_colqwen2.py,sha256=VEoStvbsJbA2UEOcl4lqUUF7M3ZIRw4p2U9PCyA_WOs,18936
+transformers/models/colqwen2/processing_colqwen2.py,sha256=HbD73OrOfu_5D5QYHFkXgjVbIUkViYgDOAzjP1_7o6s,19753
+transformers/models/conditional_detr/__init__.py,sha256=p8luCb38qMZvKdI7GLvBTx1eiKGFMv8Obd5iKaqoVe8,1179
+transformers/models/conditional_detr/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/conditional_detr/__pycache__/configuration_conditional_detr.cpython-313.pyc,,
+transformers/models/conditional_detr/__pycache__/feature_extraction_conditional_detr.cpython-313.pyc,,
+transformers/models/conditional_detr/__pycache__/image_processing_conditional_detr.cpython-313.pyc,,
+transformers/models/conditional_detr/__pycache__/image_processing_conditional_detr_fast.cpython-313.pyc,,
+transformers/models/conditional_detr/__pycache__/modeling_conditional_detr.cpython-313.pyc,,
+transformers/models/conditional_detr/__pycache__/modular_conditional_detr.cpython-313.pyc,,
+transformers/models/conditional_detr/configuration_conditional_detr.py,sha256=J0SMZqyKvQ0Bu_og74WVqwm2MuxmYOZQFe1Iw-fVliE,13739
+transformers/models/conditional_detr/feature_extraction_conditional_detr.py,sha256=QwZ7PwpcYVGjSFPluSXbT5oTGM4UgeYSL_q-sybwHgY,1676
+transformers/models/conditional_detr/image_processing_conditional_detr.py,sha256=snpIICT7p1T0M5vQ2PgzOfwoxqAbpt5sg4yVlH8CFfg,85851
+transformers/models/conditional_detr/image_processing_conditional_detr_fast.py,sha256=EwJ6_AjJQTjIkwyaFzoTSE1a_N8G_i7X4Ex3u7J12es,47135
+transformers/models/conditional_detr/modeling_conditional_detr.py,sha256=HWEg5Get6oWAyGazkv2Kb7HKeu29G2cvCy_z2qfvfsY,93497
+transformers/models/conditional_detr/modular_conditional_detr.py,sha256=MjXfARyyPMw0WHVw0tTzI5ZibAUt3mwnn5G_ryazj2E,6040
+transformers/models/convbert/__init__.py,sha256=x1Rv5-rurTKFifp3w8N_CNcZ3sHvuFwqpw_Zn1BAenw,1124
+transformers/models/convbert/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/convbert/__pycache__/configuration_convbert.cpython-313.pyc,,
+transformers/models/convbert/__pycache__/modeling_convbert.cpython-313.pyc,,
+transformers/models/convbert/__pycache__/modeling_tf_convbert.cpython-313.pyc,,
+transformers/models/convbert/__pycache__/tokenization_convbert.cpython-313.pyc,,
+transformers/models/convbert/__pycache__/tokenization_convbert_fast.cpython-313.pyc,,
+transformers/models/convbert/configuration_convbert.py,sha256=Ml8UytHYCv4-BAeyPTUzUY5ynnX8dIkCIGoumPTqaCc,6895
+transformers/models/convbert/modeling_convbert.py,sha256=joIep9a30otg9vaivmayReL8uZtEke5ZZF8-CZNdZrs,58353
+transformers/models/convbert/modeling_tf_convbert.py,sha256=mvR9901jo5L-GOvp6QtoY0nORRpwRURQpSxEs3T5X5c,61487
+transformers/models/convbert/tokenization_convbert.py,sha256=nt9KjwTDvJV-IGV_7PsznPWF1_uZj2aR3oL9-Ie56VM,20170
+transformers/models/convbert/tokenization_convbert_fast.py,sha256=3TKalVIkHf4iWJ5mxhbdjS7s6svBsEQUWVdhLSK07G8,6686
+transformers/models/convnext/__init__.py,sha256=QAUm2k3PH0pqhHzPXIhkEmqzMWKYCs4bo0gNVnH_bBw,1179
+transformers/models/convnext/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/convnext/__pycache__/configuration_convnext.cpython-313.pyc,,
+transformers/models/convnext/__pycache__/feature_extraction_convnext.cpython-313.pyc,,
+transformers/models/convnext/__pycache__/image_processing_convnext.cpython-313.pyc,,
+transformers/models/convnext/__pycache__/image_processing_convnext_fast.cpython-313.pyc,,
+transformers/models/convnext/__pycache__/modeling_convnext.cpython-313.pyc,,
+transformers/models/convnext/__pycache__/modeling_tf_convnext.cpython-313.pyc,,
+transformers/models/convnext/configuration_convnext.py,sha256=9m5oXY9Vx3BdADw0k7EeM4n3KrkCb3JA-WOPUlbtr74,6192
+transformers/models/convnext/feature_extraction_convnext.py,sha256=7oC8UpEVxpiIhXIC8Rc3I5YnJExAPEEezSdAUnn1hnw,1316
+transformers/models/convnext/image_processing_convnext.py,sha256=Q3BqeXX2Ykr-jCDDdyu0aGnQSMIOo8nB5s-cUrQJxmE,16035
+transformers/models/convnext/image_processing_convnext_fast.py,sha256=QLmJ_Le-RbJP8ffBoWewA9aR09mcMqUpfXOCLJH2Y90,6954
+transformers/models/convnext/modeling_convnext.py,sha256=L8Uv2r8bwyLt1PAtTIndqFKX552S4tG7roiBO160S3Q,17059
+transformers/models/convnext/modeling_tf_convnext.py,sha256=ff7TMHAtYN0Isoaivj1T34Mhzbvx_94YRFZqi8C3u5A,27197
+transformers/models/convnextv2/__init__.py,sha256=kOl9JbYIk9ioImF_hd0BS_mGDC8SG2k5LvO0-7WroRo,1043
+transformers/models/convnextv2/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/convnextv2/__pycache__/configuration_convnextv2.cpython-313.pyc,,
+transformers/models/convnextv2/__pycache__/modeling_convnextv2.cpython-313.pyc,,
+transformers/models/convnextv2/__pycache__/modeling_tf_convnextv2.cpython-313.pyc,,
+transformers/models/convnextv2/configuration_convnextv2.py,sha256=bUeneirJLhh_eL1rQZ1Mk_ZSCCzJlg-CwcNNEk0fVjY,5564
+transformers/models/convnextv2/modeling_convnextv2.py,sha256=dgIhYU2Qyi5CyGslL2te76QGgVxUaPtAu1wl7XgKlFc,18548
+transformers/models/convnextv2/modeling_tf_convnextv2.py,sha256=KwiYjUc4VGEjurV9SsGN_V28RiGYPn7g7v1MgRgOQ5A,27603
+transformers/models/cpm/__init__.py,sha256=5Oz79wRruzXHciBLUAOGeo6PIH70Vs4ta8ffsMyT1Yg,995
+transformers/models/cpm/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/cpm/__pycache__/tokenization_cpm.cpython-313.pyc,,
+transformers/models/cpm/__pycache__/tokenization_cpm_fast.cpython-313.pyc,,
+transformers/models/cpm/tokenization_cpm.py,sha256=G96H4tqTADmXfKU2Qc4JEd_17nQIfNQCR20AURA1InQ,15125
+transformers/models/cpm/tokenization_cpm_fast.py,sha256=HHWhb-2cny2nXVnR3EIS4FTK8uXQI2o6c1fNzkLnAK8,10310
+transformers/models/cpmant/__init__.py,sha256=RfkbbhNqdbioJ5XVaTtxBLnZRt1GFnXugS3UFXHYV0c,1032
+transformers/models/cpmant/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/cpmant/__pycache__/configuration_cpmant.cpython-313.pyc,,
+transformers/models/cpmant/__pycache__/modeling_cpmant.cpython-313.pyc,,
+transformers/models/cpmant/__pycache__/tokenization_cpmant.cpython-313.pyc,,
+transformers/models/cpmant/configuration_cpmant.py,sha256=RvgmQH8lQazRopzpfK5-Hf4eePtXXfvMJ3ar1VQC2vE,5145
+transformers/models/cpmant/modeling_cpmant.py,sha256=dEJ3bpkh4RfX8v6Erm5IjJYLv9N86P7ahaYR4AsP7as,33547
+transformers/models/cpmant/tokenization_cpmant.py,sha256=gQFg95hgeqMgArOTSG1xT-JHXxKZfF_EZRzH9f5szYI,9744
+transformers/models/csm/__init__.py,sha256=n-AQHwxZwD8imEHipiQoTDRf_OMo5zJhQ0tKKWMCPYs,1021
+transformers/models/csm/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/csm/__pycache__/configuration_csm.cpython-313.pyc,,
+transformers/models/csm/__pycache__/generation_csm.cpython-313.pyc,,
+transformers/models/csm/__pycache__/modeling_csm.cpython-313.pyc,,
+transformers/models/csm/__pycache__/modular_csm.cpython-313.pyc,,
+transformers/models/csm/__pycache__/processing_csm.cpython-313.pyc,,
+transformers/models/csm/configuration_csm.py,sha256=xhRBdgZbc76V3OMJAcF4EL9wQEi9FVQV8g3f_ZuwxGo,23791
+transformers/models/csm/generation_csm.py,sha256=gOHz4xDLglsvYr3wm2laGk8eoLC64oTPVHUeOEOfqhc,25683
+transformers/models/csm/modeling_csm.py,sha256=8cQnIsqH_rZB9VoToaP-wcStfoCC66lRF_EAE3me2Rw,50911
+transformers/models/csm/modular_csm.py,sha256=3XUI2xwwQMSvqDPPEDz_08Q74nSU8tcAeoRRTw6brlE,35456
+transformers/models/csm/processing_csm.py,sha256=yyKywlzmATo-kDMRWAIWT4WYLwO96GNs4OI-NqmlcVk,16569
+transformers/models/ctrl/__init__.py,sha256=bVtGijL4n9ewNyhcJt7lpsRhXU8yo4nY0xIlRbpismk,1062
+transformers/models/ctrl/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/ctrl/__pycache__/configuration_ctrl.cpython-313.pyc,,
+transformers/models/ctrl/__pycache__/modeling_ctrl.cpython-313.pyc,,
+transformers/models/ctrl/__pycache__/modeling_tf_ctrl.cpython-313.pyc,,
+transformers/models/ctrl/__pycache__/tokenization_ctrl.cpython-313.pyc,,
+transformers/models/ctrl/configuration_ctrl.py,sha256=Vg6ZFqal5MCr-t2K5pp5mtN2TJSeojgKL8IgbZkd81k,4684
+transformers/models/ctrl/modeling_ctrl.py,sha256=RiE6fmDL2LD_l5b7EAVXEOmVaTZjBl8hHkh861v9CBc,32482
+transformers/models/ctrl/modeling_tf_ctrl.py,sha256=7C954SzmkF6m4uraWukQ2Q_vhD3slx7usn9AXI26YsE,39279
+transformers/models/ctrl/tokenization_ctrl.py,sha256=N6D_85X_YHRvhltFZMVdR5M4ahJWyUvCuUvcFYOL5Lw,8080
+transformers/models/cvt/__init__.py,sha256=i1847SsjrXEIbrXsDEAiUlrtgLZRHtCSVG0rvCPXE9I,1022
+transformers/models/cvt/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/cvt/__pycache__/configuration_cvt.cpython-313.pyc,,
+transformers/models/cvt/__pycache__/modeling_cvt.cpython-313.pyc,,
+transformers/models/cvt/__pycache__/modeling_tf_cvt.cpython-313.pyc,,
+transformers/models/cvt/configuration_cvt.py,sha256=zVX0Ht69OHm8ttnbAYbzxtV0kNDKV_qpbSDwToqJMKI,6684
+transformers/models/cvt/modeling_cvt.py,sha256=is4hLVrsOTLO7fEbc_EWzrBwaVzC6W8eVmK0ZoULOKc,25868
+transformers/models/cvt/modeling_tf_cvt.py,sha256=cytBTMqcUMvyCbJnEtZY3att6LkoJqDlAN-iX8ucyAQ,43455
+transformers/models/d_fine/__init__.py,sha256=1gNscomeWytwZT7K2GJBwyXxDkfVNLhRjuDwyde2A0s,995
+transformers/models/d_fine/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/d_fine/__pycache__/configuration_d_fine.cpython-313.pyc,,
+transformers/models/d_fine/__pycache__/modeling_d_fine.cpython-313.pyc,,
+transformers/models/d_fine/__pycache__/modular_d_fine.cpython-313.pyc,,
+transformers/models/d_fine/configuration_d_fine.py,sha256=mFg_xcD2t7-evibX5REKeCKg3RKZIdBQsSRtGWnYN6E,22689
+transformers/models/d_fine/modeling_d_fine.py,sha256=1YobSISkhsj7hsTblLPNFk2NTh-rjHaY-FLzNlKkuE0,105555
+transformers/models/d_fine/modular_d_fine.py,sha256=EhO8F0myWi1z7gPH7J2UsoR6Wi6Ohav2KvAqZaIgsGo,57065
+transformers/models/dab_detr/__init__.py,sha256=ZvNYPQyXWplaRQIxFR8CURcsnu_HRPXrwojF5nTmGd4,998
+transformers/models/dab_detr/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/dab_detr/__pycache__/configuration_dab_detr.cpython-313.pyc,,
+transformers/models/dab_detr/__pycache__/modeling_dab_detr.cpython-313.pyc,,
+transformers/models/dab_detr/configuration_dab_detr.py,sha256=LOpJZP2nJtYS9yU2tshuqgtEWWNIojY__pl6Vuekkgg,13756
+transformers/models/dab_detr/modeling_dab_detr.py,sha256=nAuMXGiFl35XdGyyAZF0eRZwuOHGDJ8qAHkbfVPUoMM,74773
+transformers/models/dac/__init__.py,sha256=UpwXPmSOQOwvbIvklM21-y5HKY7MEIInmTt65xMX6Hw,1029
+transformers/models/dac/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/dac/__pycache__/configuration_dac.cpython-313.pyc,,
+transformers/models/dac/__pycache__/feature_extraction_dac.cpython-313.pyc,,
+transformers/models/dac/__pycache__/modeling_dac.cpython-313.pyc,,
+transformers/models/dac/configuration_dac.py,sha256=Exf0bhzmsEvxLxSJTOpdjPL6Pc30SHKW1MZ23aVdt1M,4581
+transformers/models/dac/feature_extraction_dac.py,sha256=P7duIMDqPBeXD-LXJwims_O6ZBCFZaDDGTR1AIOkU_k,8059
+transformers/models/dac/modeling_dac.py,sha256=DCnmUdaDWotLu7NAkymap4ZcxM9EUE6EMC8iBD6VN2A,28784
+transformers/models/data2vec/__init__.py,sha256=-2iFF1Rb8eF9cccBNLA29zgeFV1ADYaSLoQgf6K6KB8,1238
+transformers/models/data2vec/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/configuration_data2vec_audio.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/configuration_data2vec_text.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/configuration_data2vec_vision.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/modeling_data2vec_audio.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/modeling_data2vec_text.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/modeling_data2vec_vision.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/modeling_tf_data2vec_vision.cpython-313.pyc,,
+transformers/models/data2vec/__pycache__/modular_data2vec_audio.cpython-313.pyc,,
+transformers/models/data2vec/configuration_data2vec_audio.py,sha256=SmJMa0tBoQZudlxPe1RvpSq0YwB10GjTCify0NwL6mg,16373
+transformers/models/data2vec/configuration_data2vec_text.py,sha256=EJRVy6uJcVoDFVdMJfGaIOx_cIIW03A58N5X849i7G4,7361
+transformers/models/data2vec/configuration_data2vec_vision.py,sha256=8LsGPjPhFaBXpMF6LrtndMoXTE33pTnfZ1p9Lz06c7k,9314
+transformers/models/data2vec/modeling_data2vec_audio.py,sha256=q2RKNuqtdsxTCa3SKPoq7jG_J1ZxX0V5Aw5t8bvdvAA,59332
+transformers/models/data2vec/modeling_data2vec_text.py,sha256=lTKlMp3shNiw_oxJ0aKAflFEb5n7A3q7SyOLBICpP0c,60803
+transformers/models/data2vec/modeling_data2vec_vision.py,sha256=abpnJeWUYobkfjq_V2hd7eNHXaVviQ7Y1NnIbw8DBEQ,58534
+transformers/models/data2vec/modeling_tf_data2vec_vision.py,sha256=GO5cK8C7P1jtULl88FYw0T8Thao7E09IMx4whTzq6H0,73392
+transformers/models/data2vec/modular_data2vec_audio.py,sha256=y_ux_oMsO0Gul9oyTl_KJmGwC8nfKkNXPQE_nfviuaA,9355
+transformers/models/dbrx/__init__.py,sha256=Kzn3gm0QHW9RKEmog_IfdCGam5TXSCzkOs_WHC43sgM,989
+transformers/models/dbrx/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/dbrx/__pycache__/configuration_dbrx.cpython-313.pyc,,
+transformers/models/dbrx/__pycache__/modeling_dbrx.cpython-313.pyc,,
+transformers/models/dbrx/configuration_dbrx.py,sha256=URihH21WVlyfVL-hRw1YhQ2PtIvQXY77Ig1AiWTsT0o,9946
+transformers/models/dbrx/modeling_dbrx.py,sha256=SSD_vv3sGSOPqLACybzJCoKAoIp-5U7GVMh9TPULnPg,55353
+transformers/models/deberta/__init__.py,sha256=diL764eL8gu80XkBDQU9nI6Zy39ArO0d85MtcZ4_NPw,1119
+transformers/models/deberta/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deberta/__pycache__/configuration_deberta.cpython-313.pyc,,
+transformers/models/deberta/__pycache__/modeling_deberta.cpython-313.pyc,,
+transformers/models/deberta/__pycache__/modeling_tf_deberta.cpython-313.pyc,,
+transformers/models/deberta/__pycache__/tokenization_deberta.cpython-313.pyc,,
+transformers/models/deberta/__pycache__/tokenization_deberta_fast.cpython-313.pyc,,
+transformers/models/deberta/configuration_deberta.py,sha256=GFkrPn9TgA3QRrRTYsxFnQRsFRzFHPlpDJwS_HCo9Go,9024
+transformers/models/deberta/modeling_deberta.py,sha256=uNJFMuaQQlSJBw5i1qrhULMSIuN_uMZOx6eW4A9X4bs,48785
+transformers/models/deberta/modeling_tf_deberta.py,sha256=6rH8jdCWMoxKnurItq1ilWZ8KbaM-IbUGkPECrUMR9M,69231
+transformers/models/deberta/tokenization_deberta.py,sha256=sEbB4J8F0BdwyxfkOnkwPc4AWBHcwRyVE7Ql9AN8R-Q,15951
+transformers/models/deberta/tokenization_deberta_fast.py,sha256=uoJE9AssyGms5uRMxIy3awKj0W8nJ7sYFB0XiZXz47k,9121
+transformers/models/deberta_v2/__init__.py,sha256=N6wcSGakSmmHDW_QelFsn58zuDFTuvbctgkyC0OfQ5Y,1134
+transformers/models/deberta_v2/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deberta_v2/__pycache__/configuration_deberta_v2.cpython-313.pyc,,
+transformers/models/deberta_v2/__pycache__/modeling_deberta_v2.cpython-313.pyc,,
+transformers/models/deberta_v2/__pycache__/modeling_tf_deberta_v2.cpython-313.pyc,,
+transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2.cpython-313.pyc,,
+transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2_fast.cpython-313.pyc,,
+transformers/models/deberta_v2/configuration_deberta_v2.py,sha256=96TOPpbrNSW2e-Mvs2D3S-AuKyb8r_kLtStv0TRH_rY,8964
+transformers/models/deberta_v2/modeling_deberta_v2.py,sha256=8ZQ-fvLRecV7Aji0o5ClBISkkBU5_sQeM_47nOiDFlI,56711
+transformers/models/deberta_v2/modeling_tf_deberta_v2.py,sha256=3eJKDG8GQCcDXSKO8AWikBsCQuUZzOOC-tYO1j3yaL0,81610
+transformers/models/deberta_v2/tokenization_deberta_v2.py,sha256=WhRgGdG440RsyEZp6yV9IudKT0F2W8uXf2zLuTvcdY4,19731
+transformers/models/deberta_v2/tokenization_deberta_v2_fast.py,sha256=_i28i-MLIDWhqACnNKhrDcDOopjy2ulJb3R0HYqxBtc,8593
+transformers/models/decision_transformer/__init__.py,sha256=8XAHnFrFv8IFz495cQLTeaAk2G1AVRT7roauVHCGoJs,1021
+transformers/models/decision_transformer/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/decision_transformer/__pycache__/configuration_decision_transformer.cpython-313.pyc,,
+transformers/models/decision_transformer/__pycache__/modeling_decision_transformer.cpython-313.pyc,,
+transformers/models/decision_transformer/configuration_decision_transformer.py,sha256=hKOOb_TuM0XU7fDQu9sl2o6iNYYt16Dll3JUCKecFB4,7029
+transformers/models/decision_transformer/modeling_decision_transformer.py,sha256=wkFGMQqkzE70n4A_iORkOkt9sKPVXJqf0MpKvUX7eVY,42639
+transformers/models/deepseek_v2/__init__.py,sha256=cRpNT946KLnKXl4i2mGlImi9QLOe2a1ocnWNjBSbK68,1005
+transformers/models/deepseek_v2/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deepseek_v2/__pycache__/configuration_deepseek_v2.cpython-313.pyc,,
+transformers/models/deepseek_v2/__pycache__/modeling_deepseek_v2.cpython-313.pyc,,
+transformers/models/deepseek_v2/__pycache__/modular_deepseek_v2.cpython-313.pyc,,
+transformers/models/deepseek_v2/configuration_deepseek_v2.py,sha256=8snqY9qX7eRSMd28UfvOdVe8o19ABxcE21ak1WW1XCw,12231
+transformers/models/deepseek_v2/modeling_deepseek_v2.py,sha256=UYlUigj_hp9XHEoMnBh0XnuAllwCN2-UUBc2_eXvMQ8,27603
+transformers/models/deepseek_v2/modular_deepseek_v2.py,sha256=b1YihZ_fdIhnVOsp-56xwL58W4zjD3mfQPO2ZLITAFk,23569
+transformers/models/deepseek_v3/__init__.py,sha256=t-ejxAfULC_tUrUucNLt-x3hbTEIqUQp96m2DRFeaTg,1008
+transformers/models/deepseek_v3/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deepseek_v3/__pycache__/configuration_deepseek_v3.cpython-313.pyc,,
+transformers/models/deepseek_v3/__pycache__/modeling_deepseek_v3.cpython-313.pyc,,
+transformers/models/deepseek_v3/__pycache__/modular_deepseek_v3.cpython-313.pyc,,
+transformers/models/deepseek_v3/configuration_deepseek_v3.py,sha256=VEcrAJDOUBn4NmPDqAXDZ47gCRGGjZOaKsGL0qSH5xE,12703
+transformers/models/deepseek_v3/modeling_deepseek_v3.py,sha256=bMxjeaHZMXRuwVbmfdKniowQKpwSrxLpaAKKA_igBok,30491
+transformers/models/deepseek_v3/modular_deepseek_v3.py,sha256=7e7Ix8HnsXrubPR8Yr4vuKPk6fyr7nCiiRfbrEDFlLg,15832
+transformers/models/deepseek_vl/__init__.py,sha256=ZjpL2NkjCM_tsGMz_fY7bOQ2HQwimj3RL4cocjrEtHI,1162
+transformers/models/deepseek_vl/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deepseek_vl/__pycache__/configuration_deepseek_vl.cpython-313.pyc,,
+transformers/models/deepseek_vl/__pycache__/image_processing_deepseek_vl.cpython-313.pyc,,
+transformers/models/deepseek_vl/__pycache__/image_processing_deepseek_vl_fast.cpython-313.pyc,,
+transformers/models/deepseek_vl/__pycache__/modeling_deepseek_vl.cpython-313.pyc,,
+transformers/models/deepseek_vl/__pycache__/modular_deepseek_vl.cpython-313.pyc,,
+transformers/models/deepseek_vl/__pycache__/processing_deepseek_vl.cpython-313.pyc,,
+transformers/models/deepseek_vl/configuration_deepseek_vl.py,sha256=z2lyOgCr8XztfjQoWNgco-pHZY6n6pcEH8jQiFw8L7E,4593
+transformers/models/deepseek_vl/image_processing_deepseek_vl.py,sha256=DKk5PicCpf8rvOAwL9b6o6IiA2GQmsrb3cjGQ_hmg1o,21602
+transformers/models/deepseek_vl/image_processing_deepseek_vl_fast.py,sha256=Hl_r3YqjdC-4yo5ZaeKH0YwitqFGEleVlXsQVCCh92g,8123
+transformers/models/deepseek_vl/modeling_deepseek_vl.py,sha256=gPlb_MQO_XjbCb0Fnd6NBjT3sBuS6ZevZgo_bZnsHFQ,15304
+transformers/models/deepseek_vl/modular_deepseek_vl.py,sha256=NSXCsWAom5biDuiD4D9MLsSe91Y3wVM3tb0mXAquE9g,13554
+transformers/models/deepseek_vl/processing_deepseek_vl.py,sha256=Xpk3WxJObG5j3MRJOI0qDoCiaIPw2gINw2iYgt7ztXQ,7966
+transformers/models/deepseek_vl_hybrid/__init__.py,sha256=FjSY1MYTEUP8BbAqIG2DZSAzmLh17w8aZr7pUkC_Gto,1257
+transformers/models/deepseek_vl_hybrid/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/__pycache__/configuration_deepseek_vl_hybrid.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/__pycache__/image_processing_deepseek_vl_hybrid.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/__pycache__/image_processing_deepseek_vl_hybrid_fast.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/__pycache__/modeling_deepseek_vl_hybrid.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/__pycache__/modular_deepseek_vl_hybrid.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/__pycache__/processing_deepseek_vl_hybrid.cpython-313.pyc,,
+transformers/models/deepseek_vl_hybrid/configuration_deepseek_vl_hybrid.py,sha256=ETv0sLD0Ex8tVvRQxCaqJLpiS7BJ15Bl1y0w6maROiI,5507
+transformers/models/deepseek_vl_hybrid/image_processing_deepseek_vl_hybrid.py,sha256=vXtQ6OxoDfkjZVhRJDroJDxQStP9pRyxgUO02CQOO-k,26698
+transformers/models/deepseek_vl_hybrid/image_processing_deepseek_vl_hybrid_fast.py,sha256=fM_Mju8IhasrnIBlwq19Q5yOKZMrIe6BTgNVbbMnV6U,14783
+transformers/models/deepseek_vl_hybrid/modeling_deepseek_vl_hybrid.py,sha256=V6Lf7hZh89JjfIQemdD4O4hXLHIAq_GKz-eRddArGPU,22447
+transformers/models/deepseek_vl_hybrid/modular_deepseek_vl_hybrid.py,sha256=BWqPy_8BH5XmQ6sK2oUIPyAfnJiplJSKxefuKcGO5AY,48681
+transformers/models/deepseek_vl_hybrid/processing_deepseek_vl_hybrid.py,sha256=IqClaZkPl-79zpssFCYVh_Q-pIgCa2RgRsDfX1mRrJ4,8176
+transformers/models/deformable_detr/__init__.py,sha256=_ae-sABBY17hOT28SN_d0GLeRVjya0W4aqniH8u8Bcw,1176
+transformers/models/deformable_detr/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deformable_detr/__pycache__/configuration_deformable_detr.cpython-313.pyc,,
+transformers/models/deformable_detr/__pycache__/feature_extraction_deformable_detr.cpython-313.pyc,,
+transformers/models/deformable_detr/__pycache__/image_processing_deformable_detr.cpython-313.pyc,,
+transformers/models/deformable_detr/__pycache__/image_processing_deformable_detr_fast.cpython-313.pyc,,
+transformers/models/deformable_detr/__pycache__/modeling_deformable_detr.cpython-313.pyc,,
+transformers/models/deformable_detr/__pycache__/modular_deformable_detr.cpython-313.pyc,,
+transformers/models/deformable_detr/configuration_deformable_detr.py,sha256=5CRV4cdMS_5N4NS8Qa9ymAKocEaGIegCQ0xFmwA-Ijw,14794
+transformers/models/deformable_detr/feature_extraction_deformable_detr.py,sha256=ifv-_D_b2_5GsavP72mH6etQoobhFYmf2NB4Fyl9nP0,1668
+transformers/models/deformable_detr/image_processing_deformable_detr.py,sha256=JtqFhMBmP8845nKe0qBrTARIMpAgzTeCVIJ8EDgf09Q,73307
+transformers/models/deformable_detr/image_processing_deformable_detr_fast.py,sha256=mu_jyTSy5e8QKPQ5HBWhoh_ox8e2OjGhTPWfE4ARidM,35141
+transformers/models/deformable_detr/modeling_deformable_detr.py,sha256=9bhVx_T1iKSaMJHLMkmL5PA6v9nacX12r9FXq1Qc0to,88592
+transformers/models/deformable_detr/modular_deformable_detr.py,sha256=kOtW_VmpwsmXaQMIalkg_ITMO-zm8IkceXw_9drFxFA,6543
+transformers/models/deit/__init__.py,sha256=8S1h-sIvhRy1EiQ7DKXHqqNEgR0_juhrAyQZ2AU1rVw,1155
+transformers/models/deit/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deit/__pycache__/configuration_deit.cpython-313.pyc,,
+transformers/models/deit/__pycache__/feature_extraction_deit.cpython-313.pyc,,
+transformers/models/deit/__pycache__/image_processing_deit.cpython-313.pyc,,
+transformers/models/deit/__pycache__/image_processing_deit_fast.cpython-313.pyc,,
+transformers/models/deit/__pycache__/modeling_deit.cpython-313.pyc,,
+transformers/models/deit/__pycache__/modeling_tf_deit.cpython-313.pyc,,
+transformers/models/deit/configuration_deit.py,sha256=qd4pscfJKPWbxhmdCzj1Fdv19o8KPIuXwJpUI04Puf4,6375
+transformers/models/deit/feature_extraction_deit.py,sha256=0kfS_x_-B8O9b6ECuj3kosuPP9bwHKO_ZzjuvkBnPsc,1284
+transformers/models/deit/image_processing_deit.py,sha256=ro9TJB0U53dijS_v_Q0lMUUr_j1A3LkU4FwCYmNgeN0,15342
+transformers/models/deit/image_processing_deit_fast.py,sha256=DrJaX0I_Pu2tihvqPrsUZRdIWFTtH4BrTKT22RqVfYU,1399
+transformers/models/deit/modeling_deit.py,sha256=sUl9RJl5M1VAwdGXBkqmK9rf2xcYtejETfVAcRdMgnA,32912
+transformers/models/deit/modeling_tf_deit.py,sha256=iGVk1mPqdtOAbd6jo5j3gbueqXA30JVy7XBs2tE9kRY,51676
+transformers/models/deprecated/__init__.py,sha256=upBgfMVSzFMxNZYSd4AXNGvd0IkwHZ-ygfdf34srafo,1596
+transformers/models/deprecated/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/bort/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+transformers/models/deprecated/bort/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/deta/__init__.py,sha256=WNvQWU-pO4wBtGZPE5TAHF0OF1RPjEIgql1GC9wnmf8,1032
+transformers/models/deprecated/deta/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/deta/__pycache__/configuration_deta.cpython-313.pyc,,
+transformers/models/deprecated/deta/__pycache__/image_processing_deta.cpython-313.pyc,,
+transformers/models/deprecated/deta/__pycache__/modeling_deta.cpython-313.pyc,,
+transformers/models/deprecated/deta/configuration_deta.py,sha256=Hvr4QZAIruQw-s7C2JarGwAzrXzqg8FU40Co6g7Hhlc,14198
+transformers/models/deprecated/deta/image_processing_deta.py,sha256=HnnwUSDq_e5aIugcapay6VaE7qVWqQenj0W4Nk64-9M,54964
+transformers/models/deprecated/deta/modeling_deta.py,sha256=H-H8_K3t22MJjFt7Wm6ECcCZBIRHu1sEFLPmxS3HFzw,135531
+transformers/models/deprecated/efficientformer/__init__.py,sha256=RIMtCzn7AGYDfv279AZxapQ7tM7FFguknlC5CShrV3M,1112
+transformers/models/deprecated/efficientformer/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/efficientformer/__pycache__/configuration_efficientformer.cpython-313.pyc,,
+transformers/models/deprecated/efficientformer/__pycache__/image_processing_efficientformer.cpython-313.pyc,,
+transformers/models/deprecated/efficientformer/__pycache__/modeling_efficientformer.cpython-313.pyc,,
+transformers/models/deprecated/efficientformer/__pycache__/modeling_tf_efficientformer.cpython-313.pyc,,
+transformers/models/deprecated/efficientformer/configuration_efficientformer.py,sha256=liR9COZM1WNt1Cp7LtY0hBm4HQxcFcTSML5Sokq8Jwc,7739
+transformers/models/deprecated/efficientformer/image_processing_efficientformer.py,sha256=hJ6jnRePCFxsVK3VQphL5M0gwJLZg9bA-eObxYE7V1Q,15782
+transformers/models/deprecated/efficientformer/modeling_efficientformer.py,sha256=ILkaYcZ-GKzxhlizv8vXxIoQQUIdZm6-NDCuHCwSV54,32648
+transformers/models/deprecated/efficientformer/modeling_tf_efficientformer.py,sha256=5vHabl_36CmhqvWXoP2Lxbno9i0MXsKErQtY43-bf-U,49406
+transformers/models/deprecated/ernie_m/__init__.py,sha256=LlPR0I3qUe-L3t0xeakW3FKohvQgcWBgRMxENjy6_Ew,1047
+transformers/models/deprecated/ernie_m/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/ernie_m/__pycache__/configuration_ernie_m.cpython-313.pyc,,
+transformers/models/deprecated/ernie_m/__pycache__/modeling_ernie_m.cpython-313.pyc,,
+transformers/models/deprecated/ernie_m/__pycache__/tokenization_ernie_m.cpython-313.pyc,,
+transformers/models/deprecated/ernie_m/configuration_ernie_m.py,sha256=jPmSWmo38ovIiyrzIcHvnj-CdbnzqkogCTcCkPPsf0o,5889
+transformers/models/deprecated/ernie_m/modeling_ernie_m.py,sha256=Je-y2aRLSGIzCxxM6UjqzVVkLT-fXD7LXDo2QnFIXwo,47347
+transformers/models/deprecated/ernie_m/tokenization_ernie_m.py,sha256=se3eYEzFrfa1Z_Xnla9l7c4WsXBeF4gePXVh0jf81K4,16250
+transformers/models/deprecated/gptsan_japanese/__init__.py,sha256=Q0KI_MuMRbQNKBzYOEsDgNZLktGUjTUWlm-1-TmdAeE,1061
+transformers/models/deprecated/gptsan_japanese/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/gptsan_japanese/__pycache__/configuration_gptsan_japanese.cpython-313.pyc,,
+transformers/models/deprecated/gptsan_japanese/__pycache__/modeling_gptsan_japanese.cpython-313.pyc,,
+transformers/models/deprecated/gptsan_japanese/__pycache__/tokenization_gptsan_japanese.cpython-313.pyc,,
+transformers/models/deprecated/gptsan_japanese/configuration_gptsan_japanese.py,sha256=UQbzr2Yr4EkjZgUE0hj8HYuelQDZZi0b7nw_vwtOvvk,7169
+transformers/models/deprecated/gptsan_japanese/modeling_gptsan_japanese.py,sha256=zniTVsH8H09BVrsg-62uUfU92d76DbudGDDH7ARNkPw,65096
+transformers/models/deprecated/gptsan_japanese/tokenization_gptsan_japanese.py,sha256=Ndjdq7_mYUxUaUajF091gKrwYTlNMGUGih9MngjtIUA,23341
+transformers/models/deprecated/graphormer/__init__.py,sha256=qvmWWqa8KkAItGYVAHgjatAQlmjcF0bovLch0U0ubc8,1003
+transformers/models/deprecated/graphormer/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/graphormer/__pycache__/collating_graphormer.cpython-313.pyc,,
+transformers/models/deprecated/graphormer/__pycache__/configuration_graphormer.cpython-313.pyc,,
+transformers/models/deprecated/graphormer/__pycache__/modeling_graphormer.cpython-313.pyc,,
+transformers/models/deprecated/graphormer/algos_graphormer.pyx,sha256=b_Qlm1hKCHnAqx6oOLGC9LkivAV0K_AZRGgXT9MmBas,3635
+transformers/models/deprecated/graphormer/collating_graphormer.py,sha256=Tsm00bqXeKDCtESNHUE4q6OotxCjW9rbjy9MKyo3rvk,6075
+transformers/models/deprecated/graphormer/configuration_graphormer.py,sha256=vg6O_wY-Xn_aTVTg5XTYqNREozAomSLCHVo_diH9Pas,10480
+transformers/models/deprecated/graphormer/modeling_graphormer.py,sha256=QbQNmiKzedpILarXhoFiZR9McA29QJ1w_eJ2DVaMdpQ,37118
+transformers/models/deprecated/jukebox/__init__.py,sha256=5boFy1Eld2ll-ZpGhar77TZp4gVN5m-Ks8QumIZeAcI,1037
+transformers/models/deprecated/jukebox/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/jukebox/__pycache__/configuration_jukebox.cpython-313.pyc,,
+transformers/models/deprecated/jukebox/__pycache__/modeling_jukebox.cpython-313.pyc,,
+transformers/models/deprecated/jukebox/__pycache__/tokenization_jukebox.cpython-313.pyc,,
+transformers/models/deprecated/jukebox/configuration_jukebox.py,sha256=etr-yMp1t851E6omXV9ZrZdruQL7NkA1lifBu9h6f6A,26837
+transformers/models/deprecated/jukebox/modeling_jukebox.py,sha256=TsKdCCiOrpbuoEc2cDZV518JyOgV7mkoFUobjcFr0VA,119621
+transformers/models/deprecated/jukebox/tokenization_jukebox.py,sha256=bwpU60IQUNrPoZ0Mg4cobVe9_04tnda-6KU0koLpAC0,17366
+transformers/models/deprecated/mctct/__init__.py,sha256=oL2eRCmC1eKqGcN2nn7WWmVh4Lyq6zvfTK8Fbcct-Cc,1073
+transformers/models/deprecated/mctct/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/mctct/__pycache__/configuration_mctct.cpython-313.pyc,,
+transformers/models/deprecated/mctct/__pycache__/feature_extraction_mctct.cpython-313.pyc,,
+transformers/models/deprecated/mctct/__pycache__/modeling_mctct.cpython-313.pyc,,
+transformers/models/deprecated/mctct/__pycache__/processing_mctct.cpython-313.pyc,,
+transformers/models/deprecated/mctct/configuration_mctct.py,sha256=se5nTkdBsiWJT9eGIbsAju4olV_f-GddUDtba3HUSEk,9101
+transformers/models/deprecated/mctct/feature_extraction_mctct.py,sha256=FdZKMKkJodKfUjYXa4r12b3AkTD7qcwBDF6JCbgW-68,13494
+transformers/models/deprecated/mctct/modeling_mctct.py,sha256=0KPKMc0DVRpDeplg15-Z410B8jGHCctPebbzLoPgk3o,32526
+transformers/models/deprecated/mctct/processing_mctct.py,sha256=OcRrtpUDakmEL1HbH9atDrl9QYm4DrlR89QZoHtVwO0,5652
+transformers/models/deprecated/mega/__init__.py,sha256=MAxMoZtbT_fdVUYgMGeBlgwYRYVz07EeK5RyL2GB-ic,991
+transformers/models/deprecated/mega/__pycache__/__init__.cpython-313.pyc,,
+transformers/models/deprecated/mega/__pycache__/configuration_mega.cpython-313.pyc,,
+transformers/models/deprecated/mega/__pycache__/modeling_mega.cpython-313.pyc,,
+transformers/models/deprecated/mega/configuration_mega.py,sha256=nWdk-zPvpSI7UmUeXnClAwk-hEXCM5f5vt4xxJyAE_E,12642
+transformers/models/deprecated/mega/modeling_mega.py,sha256=ZQePgkC55tsABy9Jzz9nAH1ilQtQCPTKK_w6S3AQHTY,109494 +transformers/models/deprecated/mmbt/__init__.py,sha256=X5f5OKVKnz-mOSV_v9IbfPsDFzOpYCf2yU4ktLWWmOA,991 +transformers/models/deprecated/mmbt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/mmbt/__pycache__/configuration_mmbt.cpython-313.pyc,, +transformers/models/deprecated/mmbt/__pycache__/modeling_mmbt.cpython-313.pyc,, +transformers/models/deprecated/mmbt/configuration_mmbt.py,sha256=UNksVsSmP6e_52vlf5pa9ETgiQw6M2pM2ocVxq52fWY,1624 +transformers/models/deprecated/mmbt/modeling_mmbt.py,sha256=xil7uW5Q1fHo-0yo4eC0K6egN-sO0LasqBxe2wp9nTE,18983 +transformers/models/deprecated/nat/__init__.py,sha256=Ggl4KcqVEX5Ub66NyyA7fyMz_oBLHOMUlqRTVrYwAYs,989 +transformers/models/deprecated/nat/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/nat/__pycache__/configuration_nat.cpython-313.pyc,, +transformers/models/deprecated/nat/__pycache__/modeling_nat.cpython-313.pyc,, +transformers/models/deprecated/nat/configuration_nat.py,sha256=7ZZXfsex0BfTQ5HMdINit2aAC1j_6me30ctX4IDM35o,7001 +transformers/models/deprecated/nat/modeling_nat.py,sha256=b5awRdTSdVu223JQXBQlA5OS1F9cnbs0UBm6hKMEFTQ,38726 +transformers/models/deprecated/nezha/__init__.py,sha256=3WxwqDdNckh4KfXKV4gxIeKvkr_U1GBDA-MdEHux3JM,993 +transformers/models/deprecated/nezha/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/nezha/__pycache__/configuration_nezha.cpython-313.pyc,, +transformers/models/deprecated/nezha/__pycache__/modeling_nezha.cpython-313.pyc,, +transformers/models/deprecated/nezha/configuration_nezha.py,sha256=gZvb3NVibiLmMrTrjzlKcChmBET6dOhyAQCjFFDyp0Y,4845 +transformers/models/deprecated/nezha/modeling_nezha.py,sha256=z6e2YNMpX6TVKaKGkvldDSJ9X5-uYEuunNd6ZblUaTU,73939 +transformers/models/deprecated/open_llama/__init__.py,sha256=hhWBBxouawhwSYkuWi7Co_dO86xNFofKrtxacOlcmiM,1023 +transformers/models/deprecated/open_llama/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/open_llama/__pycache__/configuration_open_llama.cpython-313.pyc,, +transformers/models/deprecated/open_llama/__pycache__/modeling_open_llama.cpython-313.pyc,, +transformers/models/deprecated/open_llama/configuration_open_llama.py,sha256=Iwpxsxa85jUukrVctVnhz1zOswDR889ZYcXbW6-bxuA,7800 +transformers/models/deprecated/open_llama/modeling_open_llama.py,sha256=34hkO14GtiYHH0qdvZv-KIKmxvcMpnIyMJB0_gl4smM,42741 +transformers/models/deprecated/qdqbert/__init__.py,sha256=0sVNCbOvGXfJhrGbtQ7zV4v8rctY5pMzYKUvngVcvRg,1020 +transformers/models/deprecated/qdqbert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/qdqbert/__pycache__/configuration_qdqbert.cpython-313.pyc,, +transformers/models/deprecated/qdqbert/__pycache__/modeling_qdqbert.cpython-313.pyc,, +transformers/models/deprecated/qdqbert/configuration_qdqbert.py,sha256=HCvSo5NpospAUVinJu8NEGtDo4Oa2KHQX-_1kTkFA6g,5719 +transformers/models/deprecated/qdqbert/modeling_qdqbert.py,sha256=h5pmCB9MxX4gcXLvBln4dJuPiQOo_sisk-pThBsjKgY,76910 +transformers/models/deprecated/realm/__init__.py,sha256=Cqg86mvi125eaBzeoP10ykpvXvHD-InC6JYTDJXM3Ik,1109 +transformers/models/deprecated/realm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/realm/__pycache__/configuration_realm.cpython-313.pyc,, +transformers/models/deprecated/realm/__pycache__/modeling_realm.cpython-313.pyc,, +transformers/models/deprecated/realm/__pycache__/retrieval_realm.cpython-313.pyc,, 
+transformers/models/deprecated/realm/__pycache__/tokenization_realm.cpython-313.pyc,, +transformers/models/deprecated/realm/__pycache__/tokenization_realm_fast.cpython-313.pyc,, +transformers/models/deprecated/realm/configuration_realm.py,sha256=1CmnEKCJyYUmedP5pXcvLwrT2ThND3YHeuHfaEokz3M,7585 +transformers/models/deprecated/realm/modeling_realm.py,sha256=4c_tZx1f1VGR3Bmp-J-VMvP3JoIgKT6sr0aSBS9H1sA,83578 +transformers/models/deprecated/realm/retrieval_realm.py,sha256=bGzuAOl8j59toVMwzUHZbpKkNBuAeP-qo1kg8Wdh0q8,7012 +transformers/models/deprecated/realm/tokenization_realm.py,sha256=c_H4sBrcnU_bXnQZrHyPuPRvMJx_7Coc3jB0Skkyxzs,21995 +transformers/models/deprecated/realm/tokenization_realm_fast.py,sha256=vzueU81dDD3DLcdK5nhZIRJH6avUkPAB2lJ5e8F9jbA,9858 +transformers/models/deprecated/retribert/__init__.py,sha256=bitEp-fOvn6_HvMY2CUlvJCGV5-baV6Bvl7EcbBh1jM,1090 +transformers/models/deprecated/retribert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/retribert/__pycache__/configuration_retribert.cpython-313.pyc,, +transformers/models/deprecated/retribert/__pycache__/modeling_retribert.cpython-313.pyc,, +transformers/models/deprecated/retribert/__pycache__/tokenization_retribert.cpython-313.pyc,, +transformers/models/deprecated/retribert/__pycache__/tokenization_retribert_fast.cpython-313.pyc,, +transformers/models/deprecated/retribert/configuration_retribert.py,sha256=nAqsKCL46N2eJwlcyfDs2ijqCWo7SDsOInq4rOsDAhs,5232 +transformers/models/deprecated/retribert/modeling_retribert.py,sha256=PzP_j4PTRb0wLpYvS3A6D9CjRFNT-gc99MZos1LwBR8,9349 +transformers/models/deprecated/retribert/tokenization_retribert.py,sha256=Uo7UQxxwhM-SonyGQtjXvpTZK7a9FLs2UTENCOXYIfo,19536 +transformers/models/deprecated/retribert/tokenization_retribert_fast.py,sha256=EXkSqeK6zEF8yng9UPSXjrxKaTHnhyGHWqlRugUsEYQ,6730 +transformers/models/deprecated/speech_to_text_2/__init__.py,sha256=gpV3g4cmZOc1rTvOVZQN9dY1eGoXQvVnSq0LMzYYJm0,1111 +transformers/models/deprecated/speech_to_text_2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/configuration_speech_to_text_2.cpython-313.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/modeling_speech_to_text_2.cpython-313.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/processing_speech_to_text_2.cpython-313.pyc,, +transformers/models/deprecated/speech_to_text_2/__pycache__/tokenization_speech_to_text_2.cpython-313.pyc,, +transformers/models/deprecated/speech_to_text_2/configuration_speech_to_text_2.py,sha256=0nf8Vrheuc_3fpZhc3fNFfd05fXLfKvCe6n9B5hAcNA,6052 +transformers/models/deprecated/speech_to_text_2/modeling_speech_to_text_2.py,sha256=zvhPGSByae0b0JkGOc32UBSePJIiyz7Ind37J_jkgaw,43395 +transformers/models/deprecated/speech_to_text_2/processing_speech_to_text_2.py,sha256=eiJ_HvJhKH4VNOkOW33rZqURdZ5ElyiaY0vg48utC4M,4212 +transformers/models/deprecated/speech_to_text_2/tokenization_speech_to_text_2.py,sha256=vTNJBHuH4CwO6SvtOgx24mWtZzjjdHpY1ee7TZw1XxQ,8424 +transformers/models/deprecated/tapex/__init__.py,sha256=YDgKE4wAmqYPQ9U94PaZXHGDiLkZQAoMt4mNiO3QrXg,958 +transformers/models/deprecated/tapex/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/tapex/__pycache__/tokenization_tapex.cpython-313.pyc,, +transformers/models/deprecated/tapex/tokenization_tapex.py,sha256=mrRkwgPKHyVUpOKYSI12x9_Pgr3Yyv8uWcePFxKWuI8,64396 +transformers/models/deprecated/trajectory_transformer/__init__.py,sha256=qhJ78kxJOG5Q5d_NDrIiH5_btuaAKfluEzKD_nuESPw,1027 
+transformers/models/deprecated/trajectory_transformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/configuration_trajectory_transformer.cpython-313.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/modeling_trajectory_transformer.cpython-313.pyc,, +transformers/models/deprecated/trajectory_transformer/configuration_trajectory_transformer.py,sha256=EEkSTX_sw2eYmNqYutj1acxjZnlM-jFldEtqycOwJko,7105 +transformers/models/deprecated/trajectory_transformer/modeling_trajectory_transformer.py,sha256=TlcEkjXlO7PMUOcFK63sFUpdA9L6mEbGbO8G2y3OfY8,25429 +transformers/models/deprecated/transfo_xl/__init__.py,sha256=_wXu1dOeNxgJemZTynDRPmYOWcMQpYwkxbHIC_070_M,1088 +transformers/models/deprecated/transfo_xl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/configuration_transfo_xl.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl_utilities.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl_utilities.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/tokenization_transfo_xl.cpython-313.pyc,, +transformers/models/deprecated/transfo_xl/configuration_transfo_xl.py,sha256=5lUsNUzrmw3wWw_35s4TQgXQhf7C-QovFDkLs7R74Io,7905 +transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl.py,sha256=j4yoV9YRQ6xroWV1XXTIYH46EEKXgFA9_x8YIIGu76Q,46054 +transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl_utilities.py,sha256=Dlv3ZzRduWFBnZZHn8RegbW45XeCecuYCzzzZC3bDXs,7633 +transformers/models/deprecated/transfo_xl/modeling_transfo_xl.py,sha256=GNYB-j2j_3Q0Yg3xC-nCmzOOmQhCoPOQcrCp5IBVtnE,56120 +transformers/models/deprecated/transfo_xl/modeling_transfo_xl_utilities.py,sha256=L1l4K7sj8rwXzvhn7_-RK2UbOnYtfDUF0VdFr4L8nxA,10859 +transformers/models/deprecated/transfo_xl/tokenization_transfo_xl.py,sha256=PcUEB2E_ZkrCDiyWhxtVaHC3oa6DHEBQfja72VZ65bA,32193 +transformers/models/deprecated/tvlt/__init__.py,sha256=5kgH30TJlq9WEsRw6f5Bs7U4MxNFKzw2dvlENb-ZsPM,674 +transformers/models/deprecated/tvlt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/tvlt/__pycache__/configuration_tvlt.cpython-313.pyc,, +transformers/models/deprecated/tvlt/__pycache__/feature_extraction_tvlt.cpython-313.pyc,, +transformers/models/deprecated/tvlt/__pycache__/image_processing_tvlt.cpython-313.pyc,, +transformers/models/deprecated/tvlt/__pycache__/modeling_tvlt.cpython-313.pyc,, +transformers/models/deprecated/tvlt/__pycache__/processing_tvlt.cpython-313.pyc,, +transformers/models/deprecated/tvlt/configuration_tvlt.py,sha256=PaJXPttCw4t832Pqo1pV0MBYa9f-oDljfmc2SBMFXCI,8650 +transformers/models/deprecated/tvlt/feature_extraction_tvlt.py,sha256=8NHtBJnMjCRq1tpCaOjKlzbajU6hN2sIk2JjlvhJl6I,10591 +transformers/models/deprecated/tvlt/image_processing_tvlt.py,sha256=lruyDzTOOVfLfgK2ThGm0lkHVl13qU8u8xxBhiM7-xs,20314 +transformers/models/deprecated/tvlt/modeling_tvlt.py,sha256=PUpjdZ_0yjRy7tTnpmpkLE_dG0RvV4AnEqs2nnbaY9Y,56275 +transformers/models/deprecated/tvlt/processing_tvlt.py,sha256=nfcrdlaKcTk14Zb6DOBAWi5w7dHIgrxskF0ELbGWJxg,3235 +transformers/models/deprecated/van/__init__.py,sha256=zH2jgRuTkGqz0fzogoEi1HhRMNmg8BbWjhZ9dwVWyM0,989 +transformers/models/deprecated/van/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/deprecated/van/__pycache__/configuration_van.cpython-313.pyc,, +transformers/models/deprecated/van/__pycache__/modeling_van.cpython-313.pyc,, +transformers/models/deprecated/van/configuration_van.py,sha256=fTfaChmHuw2qoi_mZxIQxUH7JoVRBdkA38R_qPbrc3E,4683 +transformers/models/deprecated/van/modeling_van.py,sha256=015oRhkIzWHZTFuHcFJkFgjvPBN-8zkqLnqkV1fKAPs,20091 +transformers/models/deprecated/vit_hybrid/__init__.py,sha256=9OIBt-kLfL3VHtfpoj3rVLFzXbpwFu1F5QotHqQAUuM,1050 +transformers/models/deprecated/vit_hybrid/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/configuration_vit_hybrid.cpython-313.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/image_processing_vit_hybrid.cpython-313.pyc,, +transformers/models/deprecated/vit_hybrid/__pycache__/modeling_vit_hybrid.cpython-313.pyc,, +transformers/models/deprecated/vit_hybrid/configuration_vit_hybrid.py,sha256=Eof6B__quu-2A2SDw7erhD6goglbdVsgB3wqJGEp9WE,8477 +transformers/models/deprecated/vit_hybrid/image_processing_vit_hybrid.py,sha256=yBdG4xtH8UkzOwpMQvjNxoucIN1QWlZE93Bx9UnaRGw,16367 +transformers/models/deprecated/vit_hybrid/modeling_vit_hybrid.py,sha256=lkcuhuA7FUgqijT_0qy69R-qfQzEsumMQw9CbpFo2as,31187 +transformers/models/deprecated/xlm_prophetnet/__init__.py,sha256=q9zJIXoPqoGPw0x9PQXvpTrpSCw1y1WyYoNCFz-X554,1058 +transformers/models/deprecated/xlm_prophetnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/deprecated/xlm_prophetnet/__pycache__/configuration_xlm_prophetnet.cpython-313.pyc,, +transformers/models/deprecated/xlm_prophetnet/__pycache__/modeling_xlm_prophetnet.cpython-313.pyc,, +transformers/models/deprecated/xlm_prophetnet/__pycache__/tokenization_xlm_prophetnet.cpython-313.pyc,, +transformers/models/deprecated/xlm_prophetnet/configuration_xlm_prophetnet.py,sha256=eFMoiTH5qFKL6R0aLhV9-uqUvB2oVVcgrAia6wuLnAY,8968 +transformers/models/deprecated/xlm_prophetnet/modeling_xlm_prophetnet.py,sha256=2WOSJBHp4_W0c75GOJVDKMDhaItVk-BkTCoiBJ_NcGk,114276 +transformers/models/deprecated/xlm_prophetnet/tokenization_xlm_prophetnet.py,sha256=5m053u9Y9NEbmCUJlhprGGdC8ZrXy9VkZF3UGg3fsmo,13153 +transformers/models/depth_anything/__init__.py,sha256=Jbd8LXt-fU3_cTF7jBrkBBw-Kzscv6o7O0YiZy0R8-A,1009 +transformers/models/depth_anything/__pycache__/__init__.cpython-313.pyc,, +transformers/models/depth_anything/__pycache__/configuration_depth_anything.cpython-313.pyc,, +transformers/models/depth_anything/__pycache__/modeling_depth_anything.cpython-313.pyc,, +transformers/models/depth_anything/configuration_depth_anything.py,sha256=s_lwOMcrASuIud4jMw8rBn54vNn2zBzb1RTVVm_1KDU,8189 +transformers/models/depth_anything/modeling_depth_anything.py,sha256=UUMEeEXiKc6HBksv-O_TK4c_NFY-Ad5xmKsZrDsjFn0,16675 +transformers/models/depth_pro/__init__.py,sha256=5R4N4IVUQuK8bCFtg9qGvJFceJaHXNj4HdCWkcsyELc,1096 +transformers/models/depth_pro/__pycache__/__init__.cpython-313.pyc,, +transformers/models/depth_pro/__pycache__/configuration_depth_pro.cpython-313.pyc,, +transformers/models/depth_pro/__pycache__/image_processing_depth_pro.cpython-313.pyc,, +transformers/models/depth_pro/__pycache__/image_processing_depth_pro_fast.cpython-313.pyc,, +transformers/models/depth_pro/__pycache__/modeling_depth_pro.cpython-313.pyc,, +transformers/models/depth_pro/configuration_depth_pro.py,sha256=egobmJm308gpyStYixPOET6ikjGZA5kN9U1iCu79Yyw,10675 +transformers/models/depth_pro/image_processing_depth_pro.py,sha256=YBJndM5kRF9WeS41cGGcBXQVmuVr1z42ZyHG2DmuADQ,18962 
+transformers/models/depth_pro/image_processing_depth_pro_fast.py,sha256=Rk3J7PTb1TlrQ8PWN4jOqgSbgu1nnldyjSdmFWv_JDM,6697 +transformers/models/depth_pro/modeling_depth_pro.py,sha256=ArZOeVI_0hXCU7ueWa8PVEmg16EF-QS2n-YxVifFFwE,43072 +transformers/models/detr/__init__.py,sha256=YEWZnoCCgWt4KZNfbSi-v4KNDOJT2-ii2sxanyVDkvY,1120 +transformers/models/detr/__pycache__/__init__.cpython-313.pyc,, +transformers/models/detr/__pycache__/configuration_detr.cpython-313.pyc,, +transformers/models/detr/__pycache__/feature_extraction_detr.cpython-313.pyc,, +transformers/models/detr/__pycache__/image_processing_detr.cpython-313.pyc,, +transformers/models/detr/__pycache__/image_processing_detr_fast.cpython-313.pyc,, +transformers/models/detr/__pycache__/modeling_detr.cpython-313.pyc,, +transformers/models/detr/configuration_detr.py,sha256=wSdE-pszgelpLPMBREhmjXk77t8sPYthn9Cj4Qo02Y8,13918 +transformers/models/detr/feature_extraction_detr.py,sha256=VudvO9SXjwtxL9PPT8vM3vFKcpiOGOe6Mt8zbZuIV1I,1586 +transformers/models/detr/image_processing_detr.py,sha256=M58nEKuWgSzmumA5rIondxnghmSi-I0K-s6qgH9ZFlw,94099 +transformers/models/detr/image_processing_detr_fast.py,sha256=AQOc6jfFZ9fHk1JdGMz8r1M5Z138yugJRlVwt8D8BVw,58606 +transformers/models/detr/modeling_detr.py,sha256=JUhvZIbo4eeazVfnZzXCrq7OY0N4LP9vJ4Dsh2umBxU,77797 +transformers/models/dia/__init__.py,sha256=fvBcwJ7FAFDO6RNyUUMGrdSlUtowciNo3YYv7R2Qz1c,1133 +transformers/models/dia/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dia/__pycache__/configuration_dia.cpython-313.pyc,, +transformers/models/dia/__pycache__/feature_extraction_dia.cpython-313.pyc,, +transformers/models/dia/__pycache__/generation_dia.cpython-313.pyc,, +transformers/models/dia/__pycache__/modeling_dia.cpython-313.pyc,, +transformers/models/dia/__pycache__/modular_dia.cpython-313.pyc,, +transformers/models/dia/__pycache__/processing_dia.cpython-313.pyc,, +transformers/models/dia/__pycache__/tokenization_dia.cpython-313.pyc,, +transformers/models/dia/configuration_dia.py,sha256=G6BnNf5536gSsmhLIJco3EMulv0tk7u89VDQLkFp3Hw,20609 +transformers/models/dia/feature_extraction_dia.py,sha256=AlV5JpJ3IfoNTHjK-PPH96ZvxNaM-VMgb_-tjoe6zrM,8501 +transformers/models/dia/generation_dia.py,sha256=pS34MqzVH7_Vw7SJM_vJNzm4Wg8unT9zxx-OjNufsQ8,21579 +transformers/models/dia/modeling_dia.py,sha256=19q2BMDiq5dF2TD-p3gZwq0LK8aWW635Pb9KMnx9X1w,42571 +transformers/models/dia/modular_dia.py,sha256=kXDVJIFH3cKG5GASwkS-vA-aHvZSQPIUdbwTkh6ld7o,33408 +transformers/models/dia/processing_dia.py,sha256=amS9PaB8pEgZ9RAKytZH_KH_NNK7DVnYA9NBBTuY8jE,19969 +transformers/models/dia/tokenization_dia.py,sha256=O4dTQjoErcFLHF5IbLHp2IdlPj_8POhLfkiRr0p0BNI,4511 +transformers/models/dialogpt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/dialogpt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/diffllama/__init__.py,sha256=Yosk5eQ82PblntLff-bL3pfJZ-AVKp5jbQK5R2SLVc8,1004 +transformers/models/diffllama/__pycache__/__init__.cpython-313.pyc,, +transformers/models/diffllama/__pycache__/configuration_diffllama.cpython-313.pyc,, +transformers/models/diffllama/__pycache__/modeling_diffllama.cpython-313.pyc,, +transformers/models/diffllama/__pycache__/modular_diffllama.cpython-313.pyc,, +transformers/models/diffllama/configuration_diffllama.py,sha256=SQr6FM8h6EQCI-NNYYksvK0JSy2WN8q6aGQNDFbJAgA,10688 +transformers/models/diffllama/modeling_diffllama.py,sha256=J6Y22wNisZQU7NvCQoRisbLToPhLNW80Y_tkvwCqQVc,35434 
+transformers/models/diffllama/modular_diffllama.py,sha256=4RuvdV8UMZpSbin7zLNmM40jQNrOtD3wW7Rt75W6beg,20665 +transformers/models/dinat/__init__.py,sha256=N0HykajUSY5KsvPQNUxc8jAuuJntmDJ-Dz8Qa8_sJ9E,991 +transformers/models/dinat/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dinat/__pycache__/configuration_dinat.cpython-313.pyc,, +transformers/models/dinat/__pycache__/modeling_dinat.cpython-313.pyc,, +transformers/models/dinat/configuration_dinat.py,sha256=fhGXUqRCEkTgWL6rpPUF7J-W7usE4e7gl3DS_J99wMc,7356 +transformers/models/dinat/modeling_dinat.py,sha256=WZl4uGt1rJxWZAK4mmPGC6ms0q9wQbQ7nGOv4CDc9sI,33824 +transformers/models/dinov2/__init__.py,sha256=fDyp5N-KcJzO-vUeT3fZA8UbC21FfGEhDOlYNvXHHDc,1033 +transformers/models/dinov2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dinov2/__pycache__/configuration_dinov2.cpython-313.pyc,, +transformers/models/dinov2/__pycache__/modeling_dinov2.cpython-313.pyc,, +transformers/models/dinov2/__pycache__/modeling_flax_dinov2.cpython-313.pyc,, +transformers/models/dinov2/configuration_dinov2.py,sha256=020F55Jhk4nwbpzxfDxzi77Poe-5OpuGZ-f-mxEDYFg,8291 +transformers/models/dinov2/modeling_dinov2.py,sha256=t3hgVd2dn1N8iUzIKJ-gcA0DeriWKZWtbm0CLTE9MVo,28717 +transformers/models/dinov2/modeling_flax_dinov2.py,sha256=dfmeUz3KQ9d7eoX0X0QqJPKLHZv7c8GOrT2VF9_g7zk,31050 +transformers/models/dinov2_with_registers/__init__.py,sha256=s0cefgSRnlIVcdZYV0qz3Q9X3IEChU7mkGbbnr2IH6E,1023 +transformers/models/dinov2_with_registers/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dinov2_with_registers/__pycache__/configuration_dinov2_with_registers.cpython-313.pyc,, +transformers/models/dinov2_with_registers/__pycache__/modeling_dinov2_with_registers.cpython-313.pyc,, +transformers/models/dinov2_with_registers/__pycache__/modular_dinov2_with_registers.cpython-313.pyc,, +transformers/models/dinov2_with_registers/configuration_dinov2_with_registers.py,sha256=Jv2lhwSt3lSvN8BQuhsK6rmW1IKtSqm4un5Qsvdm6DI,8633 +transformers/models/dinov2_with_registers/modeling_dinov2_with_registers.py,sha256=IaOs9jbbbxczpdfYK7oJ1gZy9-GdtQoORzHdufSnOd8,31012 +transformers/models/dinov2_with_registers/modular_dinov2_with_registers.py,sha256=_8fvWMtffNpDsAvraYs-IERWEUO4pfjUUH3-Zwww6DM,19565 +transformers/models/dinov3_convnext/__init__.py,sha256=8VOE7Jnq3g2JBuwkVcsFoZEerdZF5dn2p5XZ26BMRY4,1011 +transformers/models/dinov3_convnext/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dinov3_convnext/__pycache__/configuration_dinov3_convnext.cpython-313.pyc,, +transformers/models/dinov3_convnext/__pycache__/modeling_dinov3_convnext.cpython-313.pyc,, +transformers/models/dinov3_convnext/configuration_dinov3_convnext.py,sha256=1_jX9wksnEI0Ds7Jb4n5qpI8SNRsDRE5KggYETxwlZo,4279 +transformers/models/dinov3_convnext/modeling_dinov3_convnext.py,sha256=OksbEwowCgncflWokKOuq5eYBxC-IkFziPhp4UhuBN8,10992 +transformers/models/dinov3_vit/__init__.py,sha256=DYSqKAAuJCZAhQyu7pcechuSR025KnzcKNtJIafTq1E,1053 +transformers/models/dinov3_vit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dinov3_vit/__pycache__/configuration_dinov3_vit.cpython-313.pyc,, +transformers/models/dinov3_vit/__pycache__/image_processing_dinov3_vit_fast.cpython-313.pyc,, +transformers/models/dinov3_vit/__pycache__/modeling_dinov3_vit.cpython-313.pyc,, +transformers/models/dinov3_vit/__pycache__/modular_dinov3_vit.cpython-313.pyc,, +transformers/models/dinov3_vit/configuration_dinov3_vit.py,sha256=GsfLiJ4jFOjK35sL7UPULPsF3ObXMPcZvkOAyKEKikY,7421 
+transformers/models/dinov3_vit/image_processing_dinov3_vit_fast.py,sha256=uuEZBAZp5qadAQ-sBuDna9geUlkVhdW7cV76VX0pzl0,3942 +transformers/models/dinov3_vit/modeling_dinov3_vit.py,sha256=efnMFAweyhnZkihc7P5X-vDxxHDm8rKWvwH3_ZRHNwU,22718 +transformers/models/dinov3_vit/modular_dinov3_vit.py,sha256=wuep7S-vBk_hEcD5gK9aiBFE9lVj1f_kG5A7zLoH2ts,17354 +transformers/models/distilbert/__init__.py,sha256=dKwCe9QsyAaNsdUJFMUa-vcuHPSQSuLKFoFBvK3cLEY,1178 +transformers/models/distilbert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/distilbert/__pycache__/configuration_distilbert.cpython-313.pyc,, +transformers/models/distilbert/__pycache__/modeling_distilbert.cpython-313.pyc,, +transformers/models/distilbert/__pycache__/modeling_flax_distilbert.cpython-313.pyc,, +transformers/models/distilbert/__pycache__/modeling_tf_distilbert.cpython-313.pyc,, +transformers/models/distilbert/__pycache__/tokenization_distilbert.cpython-313.pyc,, +transformers/models/distilbert/__pycache__/tokenization_distilbert_fast.cpython-313.pyc,, +transformers/models/distilbert/configuration_distilbert.py,sha256=h2rBKH_a_aEdRDo-5JEHAbVwUf1-6Sy6xpNRjdLvhnE,6055 +transformers/models/distilbert/modeling_distilbert.py,sha256=LZAPfqyZmEXM6pwP7jZvekd7xEJPvC2X2vF7fI6ChkM,56462 +transformers/models/distilbert/modeling_flax_distilbert.py,sha256=U0jH7nehL1vEt1gPTDwA882r8erT6Ol_QnOhgv7owro,32922 +transformers/models/distilbert/modeling_tf_distilbert.py,sha256=tOX6XGcxJXcy3lPurE3r0pRjX0U4TMWExb-EQbFc4eA,48995 +transformers/models/distilbert/tokenization_distilbert.py,sha256=hpvjVFbpRKQ1rFKW9QoGzUf2Ds3DAYLe_ZRLWi2EdCo,20999 +transformers/models/distilbert/tokenization_distilbert_fast.py,sha256=GfYGXroocFvwRahr1KHTowy-qtYwoqeqmt31MWlf2E0,6827 +transformers/models/dit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/dit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/doge/__init__.py,sha256=SU7Ea0BtJ8psgsDda2RcXdb1466OYN0l5IK7aOiuanY,1024 +transformers/models/doge/__pycache__/__init__.cpython-313.pyc,, +transformers/models/doge/__pycache__/configuration_doge.cpython-313.pyc,, +transformers/models/doge/__pycache__/modeling_doge.cpython-313.pyc,, +transformers/models/doge/__pycache__/modular_doge.cpython-313.pyc,, +transformers/models/doge/configuration_doge.py,sha256=nnuGXqrDVC9o1zt1g2sGqNy2nlwiH1hSv8HUHQQVF-c,13910 +transformers/models/doge/modeling_doge.py,sha256=1XMgpd9YzbT_WEQY01_NieKWhbQYvDcwQgKzkDI-jIE,36274 +transformers/models/doge/modular_doge.py,sha256=HQnQmH3Rs2CShqxicRXs81qCmqG_IS2VOXF8SKTNooM,37256 +transformers/models/donut/__init__.py,sha256=O1JOQtPxtjcxSfWgC_PUZkmfvcKzjPgNAakujNra1PA,1170 +transformers/models/donut/__pycache__/__init__.cpython-313.pyc,, +transformers/models/donut/__pycache__/configuration_donut_swin.cpython-313.pyc,, +transformers/models/donut/__pycache__/feature_extraction_donut.cpython-313.pyc,, +transformers/models/donut/__pycache__/image_processing_donut.cpython-313.pyc,, +transformers/models/donut/__pycache__/image_processing_donut_fast.cpython-313.pyc,, +transformers/models/donut/__pycache__/modeling_donut_swin.cpython-313.pyc,, +transformers/models/donut/__pycache__/processing_donut.cpython-313.pyc,, +transformers/models/donut/configuration_donut_swin.py,sha256=mHg0P4MRxMOw_IsHKFtBIuSuuY0tINGN3FmUImMqST8,5785 +transformers/models/donut/feature_extraction_donut.py,sha256=JfpHRB_aTYyBkySWUgofHHwGxIA8hpaS8NilnFsgIAU,1292 +transformers/models/donut/image_processing_donut.py,sha256=ZVtXfYgudu0LKhqzEb02TgjOUjGsDhg-z2bTkqhTPcE,22152 
+transformers/models/donut/image_processing_donut_fast.py,sha256=JQ3LcYzC9JVAjUZPyk11oyVZmX3uMoxw628Zl_-F9Xs,10016 +transformers/models/donut/modeling_donut_swin.py,sha256=-Iz33safTU4MRpimlQLP4wSfJgyck6-AJUNHfXLszo8,45688 +transformers/models/donut/processing_donut.py,sha256=b8gOo7yxlIM-0aSh2zky5E9WiyVXc-5nYTIiQ0v0dMo,8620 +transformers/models/dots1/__init__.py,sha256=A2jXARtNWOrbWAW2SIrsvvydm2_2keRyUBbNEz0By-I,991 +transformers/models/dots1/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dots1/__pycache__/configuration_dots1.cpython-313.pyc,, +transformers/models/dots1/__pycache__/modeling_dots1.cpython-313.pyc,, +transformers/models/dots1/__pycache__/modular_dots1.cpython-313.pyc,, +transformers/models/dots1/configuration_dots1.py,sha256=c_oMyjwI019zij4AnqwQfJSWGOA81HM7H3QKWwsZ-sE,10104 +transformers/models/dots1/modeling_dots1.py,sha256=ZoG9IsicPnjhXwsZJjrUv-vkguY5_hLT28yJoEcDDIo,27568 +transformers/models/dots1/modular_dots1.py,sha256=M350mYt43vL3zl2Wzwx2D08eK6mrGZfyMv4xDl23waM,3277 +transformers/models/dpr/__init__.py,sha256=z4FocLkQ_ckWtBZctTh-aeV1haJJY-lXF0ZRKuVbVkc,1099 +transformers/models/dpr/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dpr/__pycache__/configuration_dpr.cpython-313.pyc,, +transformers/models/dpr/__pycache__/modeling_dpr.cpython-313.pyc,, +transformers/models/dpr/__pycache__/modeling_tf_dpr.cpython-313.pyc,, +transformers/models/dpr/__pycache__/tokenization_dpr.cpython-313.pyc,, +transformers/models/dpr/__pycache__/tokenization_dpr_fast.cpython-313.pyc,, +transformers/models/dpr/configuration_dpr.py,sha256=-DGEGi7rH0bSlWSh2WCFvB6cdZ6bJ8kO3u8xvhDS8mk,6432 +transformers/models/dpr/modeling_dpr.py,sha256=L0Kl_E2Cu740zGQPZCT7wpmLBECZNGaS3V3DzRN2a7A,22826 +transformers/models/dpr/modeling_tf_dpr.py,sha256=VpzB67gC0sMnnuVcAxUhHxxj-Lxjnf658LIOFvhKLRc,33857 +transformers/models/dpr/tokenization_dpr.py,sha256=KuNEnPczArBcq36g3p_eueA2Z5AG4Y7vtgqMFHstzE4,15834 +transformers/models/dpr/tokenization_dpr_fast.py,sha256=ClKoqvgOkO4nnQX8R0KwwMTl5gZhh1VLMURD_9nEN-o,16215 +transformers/models/dpt/__init__.py,sha256=7i4wNFCo8NTFsJQi7TO9u0VGxpZ8xg1_QNvD862jNk4,1114 +transformers/models/dpt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/dpt/__pycache__/configuration_dpt.cpython-313.pyc,, +transformers/models/dpt/__pycache__/feature_extraction_dpt.cpython-313.pyc,, +transformers/models/dpt/__pycache__/image_processing_dpt.cpython-313.pyc,, +transformers/models/dpt/__pycache__/image_processing_dpt_fast.cpython-313.pyc,, +transformers/models/dpt/__pycache__/modeling_dpt.cpython-313.pyc,, +transformers/models/dpt/__pycache__/modular_dpt.cpython-313.pyc,, +transformers/models/dpt/configuration_dpt.py,sha256=6U3U6oThmJNDyQ7iwCjGVU5rdS-gUBFU36_O_uKGX3M,14845 +transformers/models/dpt/feature_extraction_dpt.py,sha256=oCMnm3Pf3cDqtuENmJyqiT0F6OOFKKC5AjwldSpx7t8,1276 +transformers/models/dpt/image_processing_dpt.py,sha256=bZaco6DG7X7UStdI5e5Wjsl9Z82YT4aNq8iNZwZxDe8,31693 +transformers/models/dpt/image_processing_dpt_fast.py,sha256=OA3em6yrstj5VfTrwzp3-E7a7DS7ERGD9IAdrk6cuNc,17120 +transformers/models/dpt/modeling_dpt.py,sha256=PZwFopcIFMfDIWwqWYFrqBbnrDr1DAX2M7qvyQF-_0U,51081 +transformers/models/dpt/modular_dpt.py,sha256=-YCUdGQypm_SDonFhnpMHs05BMVmO1ltGTURutGXn40,11912 +transformers/models/edgetam/__init__.py,sha256=oGzMZGNhQp2fv5UQEYcLdYLUIRQEnn3_vucmlOvmM_o,995 +transformers/models/edgetam/__pycache__/__init__.cpython-313.pyc,, +transformers/models/edgetam/__pycache__/configuration_edgetam.cpython-313.pyc,, 
+transformers/models/edgetam/__pycache__/modeling_edgetam.cpython-313.pyc,, +transformers/models/edgetam/__pycache__/modular_edgetam.cpython-313.pyc,, +transformers/models/edgetam/configuration_edgetam.py,sha256=U_Exgi-kJOOyte5M0XgSwGa-ppjEkf_-vahRDdwy4SI,15499 +transformers/models/edgetam/modeling_edgetam.py,sha256=QfyWU5ILRxSajUbEPrMt_DxUDyNH-y0RF01gKr33gWE,59351 +transformers/models/edgetam/modular_edgetam.py,sha256=f_gV2rV4sDyswahlRynyb8UixtQwOi278uowjOAzDvI,9703 +transformers/models/edgetam_video/__init__.py,sha256=6VuEzH4c1uAjYHV0np4nArGPyCPZxg8UTwukCpJnHbw,1023 +transformers/models/edgetam_video/__pycache__/__init__.cpython-313.pyc,, +transformers/models/edgetam_video/__pycache__/configuration_edgetam_video.cpython-313.pyc,, +transformers/models/edgetam_video/__pycache__/modeling_edgetam_video.cpython-313.pyc,, +transformers/models/edgetam_video/__pycache__/modular_edgetam_video.cpython-313.pyc,, +transformers/models/edgetam_video/configuration_edgetam_video.py,sha256=XaLD87Re-TWi3CBiMNWyhib6sJd6_YrOPMDc0g2vt0k,23699 +transformers/models/edgetam_video/modeling_edgetam_video.py,sha256=rKVpE1ylOrrEpTiW0P1nE1566GXu88D3DbgzkqNDLW0,143139 +transformers/models/edgetam_video/modular_edgetam_video.py,sha256=5Q9aQEUKTd-173isLwKPYcfvYYK_bO824UbPV8Kq8nA,57205 +transformers/models/efficientloftr/__init__.py,sha256=tG2whk3HBxtW2jyKIXiiHuLxzHhaGo8YmfxgZbX-57k,1116 +transformers/models/efficientloftr/__pycache__/__init__.cpython-313.pyc,, +transformers/models/efficientloftr/__pycache__/configuration_efficientloftr.cpython-313.pyc,, +transformers/models/efficientloftr/__pycache__/image_processing_efficientloftr.cpython-313.pyc,, +transformers/models/efficientloftr/__pycache__/image_processing_efficientloftr_fast.cpython-313.pyc,, +transformers/models/efficientloftr/__pycache__/modeling_efficientloftr.cpython-313.pyc,, +transformers/models/efficientloftr/configuration_efficientloftr.py,sha256=RjGJf6yTfjGNkneCvq8PuSbQ58h41cdkKg8GnJL7cWk,10423 +transformers/models/efficientloftr/image_processing_efficientloftr.py,sha256=lIEKp9nZLGBU86o_m2NUt_OvkFT1mzYnc0ndAKiDsn4,21505 +transformers/models/efficientloftr/image_processing_efficientloftr_fast.py,sha256=RGbuaRnYSv_sluhbmSKK4nZLNDOqokyQQ1ApJW09EZ4,12610 +transformers/models/efficientloftr/modeling_efficientloftr.py,sha256=TNTv_FS4cvrNF5h_L12e-oQDzlSHKnZVcMObXUyPQzg,58713 +transformers/models/efficientnet/__init__.py,sha256=0wxLCBxWBCh8uj4nH1syYJ76kRvUlMS6EUN5E2L2Qwc,1108 +transformers/models/efficientnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/efficientnet/__pycache__/configuration_efficientnet.cpython-313.pyc,, +transformers/models/efficientnet/__pycache__/image_processing_efficientnet.cpython-313.pyc,, +transformers/models/efficientnet/__pycache__/image_processing_efficientnet_fast.cpython-313.pyc,, +transformers/models/efficientnet/__pycache__/modeling_efficientnet.cpython-313.pyc,, +transformers/models/efficientnet/configuration_efficientnet.py,sha256=pzLFg-QlskKos0hEVJI55TihTkR0Kn_WvxRAtHQEN_E,7660 +transformers/models/efficientnet/image_processing_efficientnet.py,sha256=pNf4S1dZSS-eRUR0UESMKiNeUVyHx10SINHwtsRd4hE,18450 +transformers/models/efficientnet/image_processing_efficientnet_fast.py,sha256=f7pFk0mnkkHFjzpX4HhJDKGRVGEJ30jOR3Ka3JxaaeU,7878 +transformers/models/efficientnet/modeling_efficientnet.py,sha256=P0kfpJW6TcstFqVYIYhmJLg_IGuHRGmAslT00MRCs38,20255 +transformers/models/electra/__init__.py,sha256=e6DkZL6cjtWVsTx7tamR-zsyv0tuRYLbuYn-r-04P84,1160 +transformers/models/electra/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/electra/__pycache__/configuration_electra.cpython-313.pyc,, +transformers/models/electra/__pycache__/modeling_electra.cpython-313.pyc,, +transformers/models/electra/__pycache__/modeling_flax_electra.cpython-313.pyc,, +transformers/models/electra/__pycache__/modeling_tf_electra.cpython-313.pyc,, +transformers/models/electra/__pycache__/tokenization_electra.cpython-313.pyc,, +transformers/models/electra/__pycache__/tokenization_electra_fast.cpython-313.pyc,, +transformers/models/electra/configuration_electra.py,sha256=Q8udyq_AymQzkwnlrPurj4mdl9f4QU2l0YODKH523uc,9170 +transformers/models/electra/modeling_electra.py,sha256=oyBU8qwulBkK2yPUx9LYuohCfUGQ8NDvblZKQawTpXw,69813 +transformers/models/electra/modeling_flax_electra.py,sha256=I8aa02ZkCCKL6ch_L38BFxYmQK-EWuMnSEUsT_qUxPE,62613 +transformers/models/electra/modeling_tf_electra.py,sha256=dW-ED6HQKsLpr_QzQy3OqL-nMZBzmT_lhrlRz2bic08,78430 +transformers/models/electra/tokenization_electra.py,sha256=n0A4Wm-kdEhLKUkbn3dFM8nGWPV0vV71AzpH-YigFmk,20108 +transformers/models/electra/tokenization_electra_fast.py,sha256=vIX5oBKDTWD2Vn_CHNOUFX9Y3PB4QTUfxGyZNUswEVY,6590 +transformers/models/emu3/__init__.py,sha256=VEBLADqeToacty2xd3Zu0F_fLQRxvhfiKPkuB9jwcFM,1070 +transformers/models/emu3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/emu3/__pycache__/configuration_emu3.cpython-313.pyc,, +transformers/models/emu3/__pycache__/image_processing_emu3.cpython-313.pyc,, +transformers/models/emu3/__pycache__/modeling_emu3.cpython-313.pyc,, +transformers/models/emu3/__pycache__/modular_emu3.cpython-313.pyc,, +transformers/models/emu3/__pycache__/processing_emu3.cpython-313.pyc,, +transformers/models/emu3/configuration_emu3.py,sha256=1zPD80z9vAL54uZOgcBFqg5R6qXdLunsJXfYjH61eUc,16196 +transformers/models/emu3/image_processing_emu3.py,sha256=97ca3p77vjZMHvdnAxhcVzI0XrTG3hADD3DJAp__2T8,27279 +transformers/models/emu3/modeling_emu3.py,sha256=jUgHbtIPOlAx4FLVQsbgS80pYMWeS7lDU_-vR37KpP4,65129 +transformers/models/emu3/modular_emu3.py,sha256=VaJz03Jlu2vByoQBTfcDtSJXBy6AzFWRkgYmjqsx96w,46503 +transformers/models/emu3/processing_emu3.py,sha256=ECEglZiz4JzBpGiyvhvsYDxsiZ3Dmapgw-FNxzfoyVg,11793 +transformers/models/encodec/__init__.py,sha256=QbO9yEfCaRwYKbK0vvmwKMbqRAToyos-HTHhRmf7n5s,1041 +transformers/models/encodec/__pycache__/__init__.cpython-313.pyc,, +transformers/models/encodec/__pycache__/configuration_encodec.cpython-313.pyc,, +transformers/models/encodec/__pycache__/feature_extraction_encodec.cpython-313.pyc,, +transformers/models/encodec/__pycache__/modeling_encodec.cpython-313.pyc,, +transformers/models/encodec/configuration_encodec.py,sha256=yi016nmXOdgM9NgSSWJW6rDDHpzTIPJJ8gG9OHZK83w,8705 +transformers/models/encodec/feature_extraction_encodec.py,sha256=ANwwpLKhArrcXtqC4eLpFFeJ2fZReWsaAtvdDuCcUYg,9944 +transformers/models/encodec/modeling_encodec.py,sha256=hmDympFkHTgJCVlAzeQFz-e90AgSf8aF9gA-OeurUyY,34606 +transformers/models/encoder_decoder/__init__.py,sha256=wxXN9-4nCvYICfq8pE592rdRiQXK7S69V2cWGVQyIkw,1107 +transformers/models/encoder_decoder/__pycache__/__init__.cpython-313.pyc,, +transformers/models/encoder_decoder/__pycache__/configuration_encoder_decoder.cpython-313.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_encoder_decoder.cpython-313.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_flax_encoder_decoder.cpython-313.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_tf_encoder_decoder.cpython-313.pyc,, 
+transformers/models/encoder_decoder/configuration_encoder_decoder.py,sha256=3RReXpXx5UcFH8EEfzAsyQrbJ9FmHuHfZ3vx-Br1-54,4596 +transformers/models/encoder_decoder/modeling_encoder_decoder.py,sha256=7cohVOyp3j7WZV_FMZ37Owz2d4YwNp5C5g3lf9wigcA,30038 +transformers/models/encoder_decoder/modeling_flax_encoder_decoder.py,sha256=U3bHgOkJxZQRUqXOacW7nExXg4j2zI9850rSDCWoFDU,43595 +transformers/models/encoder_decoder/modeling_tf_encoder_decoder.py,sha256=qS8cY9FGuUbFajw75i496zVBPJJIf17ZxVKkMAfcKQ8,34188 +transformers/models/eomt/__init__.py,sha256=yXVhFSLXI_umGHqTtRHHaTevf4KwrA9LOldvY7DfuFk,1076 +transformers/models/eomt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/eomt/__pycache__/configuration_eomt.cpython-313.pyc,, +transformers/models/eomt/__pycache__/image_processing_eomt.cpython-313.pyc,, +transformers/models/eomt/__pycache__/image_processing_eomt_fast.cpython-313.pyc,, +transformers/models/eomt/__pycache__/modeling_eomt.cpython-313.pyc,, +transformers/models/eomt/__pycache__/modular_eomt.cpython-313.pyc,, +transformers/models/eomt/configuration_eomt.py,sha256=nvH6qjar0r7ejH9yM_WWvz8aiVQJG188ZdLedGMTOUw,8069 +transformers/models/eomt/image_processing_eomt.py,sha256=9LiR3mUdge-GsB2aTzhm0ypSCzYBru78MGaIv9zDprY,41160 +transformers/models/eomt/image_processing_eomt_fast.py,sha256=5HpXCUeyAaSfm_NCC1tigaJrcexCnZA0gKjbX83rG7E,21824 +transformers/models/eomt/modeling_eomt.py,sha256=OONL_groETQtZ3eqBM4kG0v_NlVnSBNYSbPskAhXKLk,54624 +transformers/models/eomt/modular_eomt.py,sha256=H_LNUkhTkButdU3iT_nzfUQClp8vQDZxCTXZgq3hvo0,25334 +transformers/models/ernie/__init__.py,sha256=TyzaXpzGwu-WqsIn1tavDqa7BCV9X-mPho4JDa9gk0I,991 +transformers/models/ernie/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ernie/__pycache__/configuration_ernie.cpython-313.pyc,, +transformers/models/ernie/__pycache__/modeling_ernie.cpython-313.pyc,, +transformers/models/ernie/configuration_ernie.py,sha256=_1shyRgpTVMQS2z7kEW8FF7spluVDc-azldGq8Clr4Y,7719 +transformers/models/ernie/modeling_ernie.py,sha256=lyvxE0tqlqlOhQvVhn_tmPyneE3ooH_qQd20whEa1ZY,76788 +transformers/models/ernie4_5/__init__.py,sha256=5tqpitaOWvT1CdXTgMtMLCKIkUG683y3jdcTZ8yuwfM,997 +transformers/models/ernie4_5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ernie4_5/__pycache__/configuration_ernie4_5.cpython-313.pyc,, +transformers/models/ernie4_5/__pycache__/modeling_ernie4_5.cpython-313.pyc,, +transformers/models/ernie4_5/__pycache__/modular_ernie4_5.cpython-313.pyc,, +transformers/models/ernie4_5/configuration_ernie4_5.py,sha256=Sg3qqYRBjErH7dPcyPdfe3CKYnaxSpjo5d2tI5RAVvs,10671 +transformers/models/ernie4_5/modeling_ernie4_5.py,sha256=7IrKhtS_IoJPG3-EWs1dZAM2hW6sNz3FXtAbwCFW_ok,20585 +transformers/models/ernie4_5/modular_ernie4_5.py,sha256=ake2tS9BcYGYQXtIeL2qvCjEsSpqItrWOtaxJTy6_tU,5604 +transformers/models/ernie4_5_moe/__init__.py,sha256=MJaAQxyB3YypXN79FGJpkSvnX6KWt86iunOFXjiA7a4,1005 +transformers/models/ernie4_5_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ernie4_5_moe/__pycache__/configuration_ernie4_5_moe.cpython-313.pyc,, +transformers/models/ernie4_5_moe/__pycache__/modeling_ernie4_5_moe.cpython-313.pyc,, +transformers/models/ernie4_5_moe/__pycache__/modular_ernie4_5_moe.cpython-313.pyc,, +transformers/models/ernie4_5_moe/configuration_ernie4_5_moe.py,sha256=HpcWwQdyQFpvYwFzMBptIsUq90mRDxNVDQEXpJGJUc8,13489 +transformers/models/ernie4_5_moe/modeling_ernie4_5_moe.py,sha256=PpUWmG-NQQurNOFitkOpYXy_SshunEgc1z8WuHnDtKY,33543 
+transformers/models/ernie4_5_moe/modular_ernie4_5_moe.py,sha256=tSKFjK37YmmHer0AbHqLAR4lZ3oOMmQBp8vW3YVAePI,14446 +transformers/models/esm/__init__.py,sha256=muSqvVMt6mySkoAm7MjweiFHJVBSj70LlakjHmZ6PEE,1094 +transformers/models/esm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/esm/__pycache__/configuration_esm.cpython-313.pyc,, +transformers/models/esm/__pycache__/modeling_esm.cpython-313.pyc,, +transformers/models/esm/__pycache__/modeling_esmfold.cpython-313.pyc,, +transformers/models/esm/__pycache__/modeling_tf_esm.cpython-313.pyc,, +transformers/models/esm/__pycache__/tokenization_esm.cpython-313.pyc,, +transformers/models/esm/configuration_esm.py,sha256=ehwkp9UcXcXXp6dphMO7cqdn-G1Bv1LUB4sohOvWy6Y,14436 +transformers/models/esm/modeling_esm.py,sha256=IXr6NI6vcDlij-jwdcFsK-yi8HQ84zy50LHt9n8S53k,43627 +transformers/models/esm/modeling_esmfold.py,sha256=wc-jMGK2mCfAMX1Q2JpRRIgvDkxf8kM3vPTnEBHEm8I,86171 +transformers/models/esm/modeling_tf_esm.py,sha256=5GsQDy8NmoHzk0s9rr1PhBQXmGke7FgtKYxd6XN0k_k,68985 +transformers/models/esm/openfold_utils/__init__.py,sha256=Xy2uqvFsLC8Ax-OOce5PgoBDiZgEJgJPqs__p5SBWUY,446 +transformers/models/esm/openfold_utils/__pycache__/__init__.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/chunk_utils.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/data_transforms.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/feats.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/loss.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/protein.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/residue_constants.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/rigid_utils.cpython-313.pyc,, +transformers/models/esm/openfold_utils/__pycache__/tensor_utils.cpython-313.pyc,, +transformers/models/esm/openfold_utils/chunk_utils.py,sha256=dwwoG_7zj-juGYg3bb1QZa0LPASNsRm7I1Oq9optMwk,14390 +transformers/models/esm/openfold_utils/data_transforms.py,sha256=Q5J_BpJ_8Fa5fZ8nP6kPB5ops-Y4MydSQkwZ-_yMDBA,3688 +transformers/models/esm/openfold_utils/feats.py,sha256=QCYupsVINo5jJuwYk38TejNYkPlGm6Kfc1YpNUxpI8s,8355 +transformers/models/esm/openfold_utils/loss.py,sha256=sndbYMMXuL0KIHlzq7ZJUlQoIRMy2Q3ZGl3h20BR1rg,3692 +transformers/models/esm/openfold_utils/protein.py,sha256=Ii9o1j_qa6cJgcJ8WBNvrVXhvuUX3668sOf1WCkJflo,11497 +transformers/models/esm/openfold_utils/residue_constants.py,sha256=-y44cw8jjQJakRSjn4MAMCeGhFSHoPzUCiMv9MfdGes,37875 +transformers/models/esm/openfold_utils/rigid_utils.py,sha256=oM1q5gGBukDtpQqrJosTmfASUEQRjM7Lo-u2PR-W6Cs,41006 +transformers/models/esm/openfold_utils/tensor_utils.py,sha256=94wNOGOftULOVB_WsyH6b-Sv38Ny1QCa4R6he3iRyl8,4763 +transformers/models/esm/tokenization_esm.py,sha256=HcbVQ9J-e7NjuhVSqcHMj-2PIGTtok-5zRRT-YhffdE,5379 +transformers/models/evolla/__init__.py,sha256=pOj8KGoc9jqtS_PYTeNCxZUtQrlFR_txE-kdZpiAkCw,1030 +transformers/models/evolla/__pycache__/__init__.cpython-313.pyc,, +transformers/models/evolla/__pycache__/configuration_evolla.cpython-313.pyc,, +transformers/models/evolla/__pycache__/modeling_evolla.cpython-313.pyc,, +transformers/models/evolla/__pycache__/modular_evolla.cpython-313.pyc,, +transformers/models/evolla/__pycache__/processing_evolla.cpython-313.pyc,, +transformers/models/evolla/configuration_evolla.py,sha256=jz9pKidmvndFIVcAAlpjw7BTb3i-Mv94ac9yhLprNlA,13817 +transformers/models/evolla/modeling_evolla.py,sha256=5Py8-WSMCji2gMMC0wYepB1sYQIF6I9ZC5R3Qhytd5Q,68763 
+transformers/models/evolla/modular_evolla.py,sha256=X2Th6aUYKrqwYYb7PFGrpaSByPwr_siJp_VBnrwE0qA,41304 +transformers/models/evolla/processing_evolla.py,sha256=l__liYSr-NmfClwzRCL88HoIAUs1TZpFYVyn4edYzlU,11480 +transformers/models/exaone4/__init__.py,sha256=gUDbb0olRjqxaPnB3APYKYKRlDPG2phGkfrmf7mIVD4,1018 +transformers/models/exaone4/__pycache__/__init__.cpython-313.pyc,, +transformers/models/exaone4/__pycache__/configuration_exaone4.cpython-313.pyc,, +transformers/models/exaone4/__pycache__/modeling_exaone4.cpython-313.pyc,, +transformers/models/exaone4/__pycache__/modular_exaone4.cpython-313.pyc,, +transformers/models/exaone4/configuration_exaone4.py,sha256=4BHo8dBOpEPtp-4n-ZV9rSB-JtkGdlIYrWtgE6w5iwE,12514 +transformers/models/exaone4/modeling_exaone4.py,sha256=G9pQVhlp20FyCUeEj7WwRPbxTlGZhv57lxP4DG8ZilE,23692 +transformers/models/exaone4/modular_exaone4.py,sha256=WhbMPXjKVReTQruyPqK9S9aMpKVSC1xX-UsBD-_zOSs,23645 +transformers/models/falcon/__init__.py,sha256=qmBlF_xusyrueKMfriC2ldVrHzeLIT7ruSdduMODuE4,993 +transformers/models/falcon/__pycache__/__init__.cpython-313.pyc,, +transformers/models/falcon/__pycache__/configuration_falcon.cpython-313.pyc,, +transformers/models/falcon/__pycache__/modeling_falcon.cpython-313.pyc,, +transformers/models/falcon/configuration_falcon.py,sha256=5vh10LAkioX6y3qJh84u3SlXmu3gPArXNMe1apM7f9g,10917 +transformers/models/falcon/modeling_falcon.py,sha256=H0LI3p32Zu7IxZT3EfabgEoLNWvU5vAs7_yzK2k_0gs,63968 +transformers/models/falcon_h1/__init__.py,sha256=cpix3f3f_xMDLf2OLuyYZULnb7enZl3UZapPQuf0YZc,1012 +transformers/models/falcon_h1/__pycache__/__init__.cpython-313.pyc,, +transformers/models/falcon_h1/__pycache__/configuration_falcon_h1.cpython-313.pyc,, +transformers/models/falcon_h1/__pycache__/modeling_falcon_h1.cpython-313.pyc,, +transformers/models/falcon_h1/__pycache__/modular_falcon_h1.cpython-313.pyc,, +transformers/models/falcon_h1/configuration_falcon_h1.py,sha256=ql8WPS_TKtf2KPz0Ko_WxJpf7NVnyufn-B-j20kewfc,13894 +transformers/models/falcon_h1/modeling_falcon_h1.py,sha256=ho4wA8e3m2zRNW0rf4hYlfilnLGOlh9FaHB4dP4swr8,74104 +transformers/models/falcon_h1/modular_falcon_h1.py,sha256=GrayFTwl3omibc-3Eq8fQCZmT20yf3dg2sBgc3LhJ9w,62060 +transformers/models/falcon_mamba/__init__.py,sha256=Czo-T_Nt73nvRbK-yJEZAYsU3Bxu4i1fOxFuPosiFPw,1005 +transformers/models/falcon_mamba/__pycache__/__init__.cpython-313.pyc,, +transformers/models/falcon_mamba/__pycache__/configuration_falcon_mamba.cpython-313.pyc,, +transformers/models/falcon_mamba/__pycache__/modeling_falcon_mamba.cpython-313.pyc,, +transformers/models/falcon_mamba/__pycache__/modular_falcon_mamba.cpython-313.pyc,, +transformers/models/falcon_mamba/configuration_falcon_mamba.py,sha256=xbRMQ4ZHRpJmNHxVJh2tIYtnMJWAME0aDbKe1oMY_34,8846 +transformers/models/falcon_mamba/modeling_falcon_mamba.py,sha256=FNLdMUwOlFNjCHyQazOHu5czgnwpQCQL2o22jkI41Wo,43095 +transformers/models/falcon_mamba/modular_falcon_mamba.py,sha256=RQ0kTvNF7LiFgGduy7fOvhIwCBVCRB4liCWJQx5ajYQ,26342 +transformers/models/fastspeech2_conformer/__init__.py,sha256=pILmX51CcqSiFGtl_dsX1yW2S_QugA3UHAT8f4psOtA,1077 +transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/configuration_fastspeech2_conformer.cpython-313.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/modeling_fastspeech2_conformer.cpython-313.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/tokenization_fastspeech2_conformer.cpython-313.pyc,, 
+transformers/models/fastspeech2_conformer/configuration_fastspeech2_conformer.py,sha256=TZ6a2rSWE3ikugOBx_sr4tULm2FpX8Qtj2S7MLBdnNQ,24656 +transformers/models/fastspeech2_conformer/modeling_fastspeech2_conformer.py,sha256=x5A-irRigdYUA7wZoHIWznuitR26YoEdFA-Anmz3c7I,69493 +transformers/models/fastspeech2_conformer/tokenization_fastspeech2_conformer.py,sha256=x8b0G-lsibtRBg-I3FzLBHC1YhiTmr8A2o6V8LbEz6M,6258 +transformers/models/flaubert/__init__.py,sha256=LdGmxq7pcDPVcvqO1ol7VYtpjKKCAQuiJ1ISrNT9nEs,1078 +transformers/models/flaubert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/flaubert/__pycache__/configuration_flaubert.cpython-313.pyc,, +transformers/models/flaubert/__pycache__/modeling_flaubert.cpython-313.pyc,, +transformers/models/flaubert/__pycache__/modeling_tf_flaubert.cpython-313.pyc,, +transformers/models/flaubert/__pycache__/tokenization_flaubert.cpython-313.pyc,, +transformers/models/flaubert/configuration_flaubert.py,sha256=920NSmtA4I1NbeTk642E8OvKEWD9TnwBggtaIGyx70U,11250 +transformers/models/flaubert/modeling_flaubert.py,sha256=o91gC3wmWAM4HXfeAtU8BwMQzlqmOv-Po_HZljFL2zM,80054 +transformers/models/flaubert/modeling_tf_flaubert.py,sha256=zeZWpnYi56nPKArmP43f6TTDPOxxObhgjd0CXJ4Y3Qo,57170 +transformers/models/flaubert/tokenization_flaubert.py,sha256=ACYpzkElWcCyW9lJX9mRqh1-uEI9qqV2IQIXSr8JhPk,20970 +transformers/models/flava/__init__.py,sha256=UZ-PnfpalIOh2pPXWj_WSjsxjLgMBh2kKVyyLsNTUOk,1160 +transformers/models/flava/__pycache__/__init__.cpython-313.pyc,, +transformers/models/flava/__pycache__/configuration_flava.cpython-313.pyc,, +transformers/models/flava/__pycache__/feature_extraction_flava.cpython-313.pyc,, +transformers/models/flava/__pycache__/image_processing_flava.cpython-313.pyc,, +transformers/models/flava/__pycache__/image_processing_flava_fast.cpython-313.pyc,, +transformers/models/flava/__pycache__/modeling_flava.cpython-313.pyc,, +transformers/models/flava/__pycache__/processing_flava.cpython-313.pyc,, +transformers/models/flava/configuration_flava.py,sha256=5rnIMzvISrCpS012IshWXbiLyc4gvSrhWursvORJUF0,34021 +transformers/models/flava/feature_extraction_flava.py,sha256=fZzf449ea7VNw1xyNfCuoa_e2pMEfGSxqNTX9YdoE5I,1314 +transformers/models/flava/image_processing_flava.py,sha256=qimlDnZCGEENOX1KHI09-5TKHebHnRcIQ8BJhgIRWaw,37688 +transformers/models/flava/image_processing_flava_fast.py,sha256=iJbfdyw-KeQwWURqvrmYnG7UmJecgxFnxOEymzeJyCM,22502 +transformers/models/flava/modeling_flava.py,sha256=ri_xl_yKna46z6-KiTcAtYkUprQF6Jh2EaE75zUK5lU,94428 +transformers/models/flava/processing_flava.py,sha256=VhcJAQLYGs-TqpmOK3uw9eL9yxI1XQc2Y_lXZ3IIpfc,3899 +transformers/models/flex_olmo/__init__.py,sha256=g2YZth5tFESCUDBUKffzcqm2Z3_u5jYc90XzKMj1jk0,1015 +transformers/models/flex_olmo/__pycache__/__init__.cpython-313.pyc,, +transformers/models/flex_olmo/__pycache__/configuration_flex_olmo.cpython-313.pyc,, +transformers/models/flex_olmo/__pycache__/modeling_flex_olmo.cpython-313.pyc,, +transformers/models/flex_olmo/__pycache__/modular_flex_olmo.cpython-313.pyc,, +transformers/models/flex_olmo/configuration_flex_olmo.py,sha256=4C_2V1KzreoxBUhkba3bFQRgmQPTNdCPJ3s0ZIf4MWE,10575 +transformers/models/flex_olmo/modeling_flex_olmo.py,sha256=u4z_F-UtdSoNk31itIjk-g_fu1Zl7TXXWwNDrcs7FLw,31574 +transformers/models/flex_olmo/modular_flex_olmo.py,sha256=hZp4dbwGB5nA29nYgyzl6J0ubIs139Cggg4MYe-QCHc,15595 +transformers/models/florence2/__init__.py,sha256=H4rJT4tWJithwWolb041i7SM693Y_-oO8hhdePbjYpY,1039 +transformers/models/florence2/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/florence2/__pycache__/configuration_florence2.cpython-313.pyc,, +transformers/models/florence2/__pycache__/modeling_florence2.cpython-313.pyc,, +transformers/models/florence2/__pycache__/modular_florence2.cpython-313.pyc,, +transformers/models/florence2/__pycache__/processing_florence2.cpython-313.pyc,, +transformers/models/florence2/configuration_florence2.py,sha256=elSEHjPwVE3GcOUCJDVktLEINt_lD2BIJCSAnFqlFhU,9623 +transformers/models/florence2/modeling_florence2.py,sha256=q3mw6pmDDSyERuEyZf_e_0A77FP-C9CVCyO9nTBB1yk,43633 +transformers/models/florence2/modular_florence2.py,sha256=iaQhlKJuACheRoiESyPEZb54_6FiJr8IxHyxxNga1SU,78624 +transformers/models/florence2/processing_florence2.py,sha256=D_Th63wqNZJBgHjNp4FH4SX0yGx8sLPFO_h-qbJU8bM,36775 +transformers/models/fnet/__init__.py,sha256=V3nuz_DsD_K5-RuL-Gt4hr5FVtNz12s46O_Vtx_xvCY,1068 +transformers/models/fnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/fnet/__pycache__/configuration_fnet.cpython-313.pyc,, +transformers/models/fnet/__pycache__/modeling_fnet.cpython-313.pyc,, +transformers/models/fnet/__pycache__/tokenization_fnet.cpython-313.pyc,, +transformers/models/fnet/__pycache__/tokenization_fnet_fast.cpython-313.pyc,, +transformers/models/fnet/configuration_fnet.py,sha256=oZVGszdEYsE-nJnpSlmU3r4tENCfwHnNKaL4NmrD7N4,5567 +transformers/models/fnet/modeling_fnet.py,sha256=eVQU_D8tFJSy_XEbFbkHWlguyNdOInCx9mTYHdMAP2k,44193 +transformers/models/fnet/tokenization_fnet.py,sha256=1oHKKZ05BkW9gY2Ibq__USJVrfxIL6ee2_kJK3vTH_Y,13537 +transformers/models/fnet/tokenization_fnet_fast.py,sha256=Ed77wG8t5cE351Rx2shX98ysFjQFasEor4-U0zj2wYk,6841 +transformers/models/focalnet/__init__.py,sha256=kFk7pYv4troBIWdCYosHMKh8PAnpXqjlxaRRQ5adkG0,997 +transformers/models/focalnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/focalnet/__pycache__/configuration_focalnet.cpython-313.pyc,, +transformers/models/focalnet/__pycache__/modeling_focalnet.cpython-313.pyc,, +transformers/models/focalnet/configuration_focalnet.py,sha256=L6CS3mcLLDZTIFeiTqweu8W1MogNQq8ZMrIiD_-g1x4,8057 +transformers/models/focalnet/modeling_focalnet.py,sha256=mDIxh2fvOw-AOZ-0P4UWaUw3Dg0Wiws19HHLa2VamQw,37537 +transformers/models/fsmt/__init__.py,sha256=u_Xx7d3qDicqwR_W0js1h2wPiLKWM1RlMu7fsBdIHy4,1026 +transformers/models/fsmt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/fsmt/__pycache__/configuration_fsmt.cpython-313.pyc,, +transformers/models/fsmt/__pycache__/modeling_fsmt.cpython-313.pyc,, +transformers/models/fsmt/__pycache__/tokenization_fsmt.cpython-313.pyc,, +transformers/models/fsmt/configuration_fsmt.py,sha256=TWNVhQROEA1WMCL6j3HX3nRkyhVsB2n2kwNb6QkE4pY,10363 +transformers/models/fsmt/modeling_fsmt.py,sha256=n7TphdY7Y6y3d7HonFlp4TBw0X0wqx6reIopQAT05vY,53104 +transformers/models/fsmt/tokenization_fsmt.py,sha256=86Txz3pYk6fL5nWcqfbpBSo_EuaC-O6tqqHo5zu9GUw,17944 +transformers/models/funnel/__init__.py,sha256=087Y3Xz6y0HA5SgKe-s2z-ZzUIq1u_axxCRh2__gVro,1182 +transformers/models/funnel/__pycache__/__init__.cpython-313.pyc,, +transformers/models/funnel/__pycache__/configuration_funnel.cpython-313.pyc,, +transformers/models/funnel/__pycache__/modeling_funnel.cpython-313.pyc,, +transformers/models/funnel/__pycache__/modeling_tf_funnel.cpython-313.pyc,, +transformers/models/funnel/__pycache__/tokenization_funnel.cpython-313.pyc,, +transformers/models/funnel/__pycache__/tokenization_funnel_fast.cpython-313.pyc,, +transformers/models/funnel/configuration_funnel.py,sha256=b53gi5CW7KpmzFFAM2klOVODwb1Jq30XbzX1rINu7x8,7682 
+transformers/models/funnel/modeling_funnel.py,sha256=k5rkMNmHEkJ2YON2bDDmb5a9krS9z-ZjxZR-1OSIuMo,61585 +transformers/models/funnel/modeling_tf_funnel.py,sha256=ZMdqwNPK3WkhzO-HAfaDtbtoctQwEDjGsfiHsJ_CXWg,80735 +transformers/models/funnel/tokenization_funnel.py,sha256=2VRzAH-LPCQcL_gm0LFKsoIPdAKajzCC3gUhrBWuPRE,22685 +transformers/models/funnel/tokenization_funnel_fast.py,sha256=dMo_pnTLyD926bjcPiormC4L_l6oV3W-Xt7xy858Mfs,8666 +transformers/models/fuyu/__init__.py,sha256=NcygIhTFvIZzXPZUReC1WYReGAVINSpG0xW7KqEmd8c,1065 +transformers/models/fuyu/__pycache__/__init__.cpython-313.pyc,, +transformers/models/fuyu/__pycache__/configuration_fuyu.cpython-313.pyc,, +transformers/models/fuyu/__pycache__/image_processing_fuyu.cpython-313.pyc,, +transformers/models/fuyu/__pycache__/modeling_fuyu.cpython-313.pyc,, +transformers/models/fuyu/__pycache__/processing_fuyu.cpython-313.pyc,, +transformers/models/fuyu/configuration_fuyu.py,sha256=7jgdM3jnzy-Z-IZGjx3VgAhuS5_bnEa7FIO8y6kVTaw,10198 +transformers/models/fuyu/image_processing_fuyu.py,sha256=gwh-EQC7Fd5JajX9bNgnW5SxLnCljbFMQmvVD4-tBVI,33659 +transformers/models/fuyu/modeling_fuyu.py,sha256=4Ezt0hQL6I6uLxTuGyTPzybp73ExTNMf2v2KjNGoHjc,18161 +transformers/models/fuyu/processing_fuyu.py,sha256=uLWX5xTMmnScTYgVxQReqdFGr17V0vK6PDAJ3zSLs-E,36693 +transformers/models/gemma/__init__.py,sha256=xXoIfeCXNQOEnARxU3QucfH5mn-a_AE4wp69YkykT50,1111 +transformers/models/gemma/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gemma/__pycache__/configuration_gemma.cpython-313.pyc,, +transformers/models/gemma/__pycache__/modeling_flax_gemma.cpython-313.pyc,, +transformers/models/gemma/__pycache__/modeling_gemma.cpython-313.pyc,, +transformers/models/gemma/__pycache__/modular_gemma.cpython-313.pyc,, +transformers/models/gemma/__pycache__/tokenization_gemma.cpython-313.pyc,, +transformers/models/gemma/__pycache__/tokenization_gemma_fast.cpython-313.pyc,, +transformers/models/gemma/configuration_gemma.py,sha256=LWnoaGz53xxPIbZT1WzQEuF7Rt_MplPVQ9NvP55XE9I,8375 +transformers/models/gemma/modeling_flax_gemma.py,sha256=rRGlPaBXI_lxtLz47GfwZabrdzM8EHctNgitlXNjz4Q,32439 +transformers/models/gemma/modeling_gemma.py,sha256=zGwBZdq3Tiik9WdLgTzqr6nEis8VXaSp4mklw1Kqbok,21387 +transformers/models/gemma/modular_gemma.py,sha256=tRFQEhUSMg439svs1Gc0gvITGCIPPG3OXmsmS_WMAtc,20809 +transformers/models/gemma/tokenization_gemma.py,sha256=AcVuuIvQS7kCoS03rX8VkC86S_ywQYKPUGq4ouFXdUY,14229 +transformers/models/gemma/tokenization_gemma_fast.py,sha256=iEJm0bejSYb1DmmXbb6UuRZaeGX0SLG1uCx5v625hTI,8097 +transformers/models/gemma2/__init__.py,sha256=H0jWJX-AcGRTjdzkGJagKnjB6GnpqVUG4ODFhMF9OWM,993 +transformers/models/gemma2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gemma2/__pycache__/configuration_gemma2.cpython-313.pyc,, +transformers/models/gemma2/__pycache__/modeling_gemma2.cpython-313.pyc,, +transformers/models/gemma2/__pycache__/modular_gemma2.cpython-313.pyc,, +transformers/models/gemma2/configuration_gemma2.py,sha256=QfNcLy2BpJAkBadWHmWPzh4AKW1bMHVOQMsizN8lUgs,9602 +transformers/models/gemma2/modeling_gemma2.py,sha256=-ne_FxFIu0e6SlPrygL05s3NzaFeouI66kopZTm9IDw,25442 +transformers/models/gemma2/modular_gemma2.py,sha256=lV0G2E1ciXujKy6AxQ253Cgn95ehxvLMZBvt5OT2SI8,24906 +transformers/models/gemma3/__init__.py,sha256=yDt-ADg8e57SlRlpfsC7KzQCeYYgUrTz9ZO5VC5v_W4,1121 +transformers/models/gemma3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gemma3/__pycache__/configuration_gemma3.cpython-313.pyc,, 
+transformers/models/gemma3/__pycache__/image_processing_gemma3.cpython-313.pyc,, +transformers/models/gemma3/__pycache__/image_processing_gemma3_fast.cpython-313.pyc,, +transformers/models/gemma3/__pycache__/modeling_gemma3.cpython-313.pyc,, +transformers/models/gemma3/__pycache__/modular_gemma3.cpython-313.pyc,, +transformers/models/gemma3/__pycache__/processing_gemma3.cpython-313.pyc,, +transformers/models/gemma3/configuration_gemma3.py,sha256=Dk2pyAJiIgKoqqY_xJCcGSor3eAHj0JQ_9U7SneSGdk,17851 +transformers/models/gemma3/image_processing_gemma3.py,sha256=u1Fo2LBwEIT5Nw_eisuM8cXg87ycrQxIruQdNkfGvhk,20140 +transformers/models/gemma3/image_processing_gemma3_fast.py,sha256=Ta3HqteIYjCou7olRU1rgZ8cqbcsiC5x1HExDnhkJxI,10951 +transformers/models/gemma3/modeling_gemma3.py,sha256=TarjvfGms0XfdgFKeM4cpeZtjj2wCr9WXhveQZi0OFU,59215 +transformers/models/gemma3/modular_gemma3.py,sha256=fUsLZ-usLy5osh7wF4KH_MACrffT2XjxW-7ivqKUvPo,55391 +transformers/models/gemma3/processing_gemma3.py,sha256=-b3OzlSoSsGFXHyng8144ioPuhqVEmXS9yLvQpxEuTE,7818 +transformers/models/gemma3n/__init__.py,sha256=ZSrv5oSiULGXY7Vszb--vaJh1l7FBe1lrZD_3LX6cj4,1079 +transformers/models/gemma3n/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gemma3n/__pycache__/configuration_gemma3n.cpython-313.pyc,, +transformers/models/gemma3n/__pycache__/feature_extraction_gemma3n.cpython-313.pyc,, +transformers/models/gemma3n/__pycache__/modeling_gemma3n.cpython-313.pyc,, +transformers/models/gemma3n/__pycache__/modular_gemma3n.cpython-313.pyc,, +transformers/models/gemma3n/__pycache__/processing_gemma3n.cpython-313.pyc,, +transformers/models/gemma3n/configuration_gemma3n.py,sha256=2EcZp94aPITiRM7R4IXJ2UbVl3k-hatbzq5w8TUIABo,36427 +transformers/models/gemma3n/feature_extraction_gemma3n.py,sha256=2S4N0Qn8EvNfqIv-TvMyq5D30f8GzeURmjdDlIKQS8w,15130 +transformers/models/gemma3n/modeling_gemma3n.py,sha256=8WBUcM3Hpfrj_IyLwlOS-DYvfUF4RDk1knEh7sqokvU,112595 +transformers/models/gemma3n/modular_gemma3n.py,sha256=tMejAt4kJcE1jUP7zWijYBn1-ccDlUfGlddBkNEPAQU,130599 +transformers/models/gemma3n/processing_gemma3n.py,sha256=XjOAcZxamgS5ahNXUy1DNe5qspn6zO0Z1zbsnONn0wI,7064 +transformers/models/git/__init__.py,sha256=jY1iLd7UMOmcCfrKgzoUJawLa0DQ55wHN26L09YSwhc,1021 +transformers/models/git/__pycache__/__init__.cpython-313.pyc,, +transformers/models/git/__pycache__/configuration_git.cpython-313.pyc,, +transformers/models/git/__pycache__/modeling_git.cpython-313.pyc,, +transformers/models/git/__pycache__/processing_git.cpython-313.pyc,, +transformers/models/git/configuration_git.py,sha256=SNcI2qHfnAuwDcYWfiP8Sb_TQXPtosHlw1vDY8bEl04,10447 +transformers/models/git/modeling_git.py,sha256=shLXwnA0DhnKnwyN4-3i6W-DN5DxZjhqVle3ZUJx77c,62276 +transformers/models/git/processing_git.py,sha256=PRKMd0E7yqdnPt2wJKxIGWm8H0PmetZCbLpBKXbe6V0,1586 +transformers/models/glm/__init__.py,sha256=fIafw6FAflbbeG_nEM_VPJyMJHnu_NbWHTHjECIAvIs,987 +transformers/models/glm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/glm/__pycache__/configuration_glm.cpython-313.pyc,, +transformers/models/glm/__pycache__/modeling_glm.cpython-313.pyc,, +transformers/models/glm/__pycache__/modular_glm.cpython-313.pyc,, +transformers/models/glm/configuration_glm.py,sha256=0i7hGoGPrP308WOqZ2ZbGCw2-06GRiDvxAv_m2Fd-Fg,7535 +transformers/models/glm/modeling_glm.py,sha256=pjAA3KEHQHcuI6vx44lOVMPUehbvgFI0mBNsoC-19To,21386 +transformers/models/glm/modular_glm.py,sha256=YtrteOxgDeA1J9QcLdOc6v17tSUIHW596Zvus6kDPEk,4063 
+transformers/models/glm4/__init__.py,sha256=okqViVxR-MUlkyIdKmSwrDKA7u8pGG49OIKtW9X1hvU,989 +transformers/models/glm4/__pycache__/__init__.cpython-313.pyc,, +transformers/models/glm4/__pycache__/configuration_glm4.cpython-313.pyc,, +transformers/models/glm4/__pycache__/modeling_glm4.cpython-313.pyc,, +transformers/models/glm4/__pycache__/modular_glm4.cpython-313.pyc,, +transformers/models/glm4/configuration_glm4.py,sha256=lFRWkK1kw_GDnhi0w0BViKnQ9FBpRp0uMEyjLxNW7dY,7551 +transformers/models/glm4/modeling_glm4.py,sha256=mkNRoafjKv_0RvWW6PVg5Je2AF4H1gwIl2tQaB6HGKg,22269 +transformers/models/glm4/modular_glm4.py,sha256=pch-cNrRSb-uoWzp6zKypU-un2Rf8yvzTophNck8yGA,5369 +transformers/models/glm4_moe/__init__.py,sha256=dfmB1kPUzq5-xfXh3zFtfGdSJu7CDDbfL401u_EayjM,997 +transformers/models/glm4_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/glm4_moe/__pycache__/configuration_glm4_moe.cpython-313.pyc,, +transformers/models/glm4_moe/__pycache__/modeling_glm4_moe.cpython-313.pyc,, +transformers/models/glm4_moe/__pycache__/modular_glm4_moe.cpython-313.pyc,, +transformers/models/glm4_moe/configuration_glm4_moe.py,sha256=uhNU910obiitL0_IAnbO_STy7uuIBY_uugscT0caAJE,13422 +transformers/models/glm4_moe/modeling_glm4_moe.py,sha256=eCcgo9UWQZJI0Pt643iEQcBgGC97Nr_HTU_SklKjbYk,26948 +transformers/models/glm4_moe/modular_glm4_moe.py,sha256=r-tPmAgMGc9TBtllyiKgLxIfI3O1ND0glaPTkdZgNPc,15685 +transformers/models/glm4v/__init__.py,sha256=czqsAA98MYCyclV5YncS0xt0pnQcYUZc7jFgxZUEKmQ,1027 +transformers/models/glm4v/__pycache__/__init__.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/configuration_glm4v.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/image_processing_glm4v.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/image_processing_glm4v_fast.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/modeling_glm4v.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/modular_glm4v.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/processing_glm4v.cpython-313.pyc,, +transformers/models/glm4v/__pycache__/video_processing_glm4v.cpython-313.pyc,, +transformers/models/glm4v/configuration_glm4v.py,sha256=adgac_qGRsUa3-cesp28ffYERnw4_x6x10NTedhmH0s,17605 +transformers/models/glm4v/image_processing_glm4v.py,sha256=cRsxk6J3Vv1WaSHhlqEfsxZI0iUPkQp2Tt9EOzz01R0,24294 +transformers/models/glm4v/image_processing_glm4v_fast.py,sha256=zpNlTEBkartx0XJdMR0o1uuDjPbXB_sjQUK0QuZ3nT4,7701 +transformers/models/glm4v/modeling_glm4v.py,sha256=9oNWw8x4BnabRw8gl9_o4cf4UZMpo9Yktf3j-dLIdSE,75611 +transformers/models/glm4v/modular_glm4v.py,sha256=EphYFt2guzCFBfJ_FVYSF352TiElhyfZHEQMg7TEcpc,80424 +transformers/models/glm4v/processing_glm4v.py,sha256=F7paRqTx8NHcsZ8aWELbtfcWiFgkwBtKl9nrFmtzDa8,15328 +transformers/models/glm4v/video_processing_glm4v.py,sha256=6-I82f2FMwDo9FnJcWRkPxEJDnSFnYs1hp1zUpEykWs,10146 +transformers/models/glm4v_moe/__init__.py,sha256=4MuhU3oMjO5wL4YHEEhn0uU4qIsXz17DJPMs_xqMyHA,999 +transformers/models/glm4v_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/glm4v_moe/__pycache__/configuration_glm4v_moe.cpython-313.pyc,, +transformers/models/glm4v_moe/__pycache__/modeling_glm4v_moe.cpython-313.pyc,, +transformers/models/glm4v_moe/__pycache__/modular_glm4v_moe.cpython-313.pyc,, +transformers/models/glm4v_moe/configuration_glm4v_moe.py,sha256=CQJRarut0RCNVDoJXaBlyAPIzqZh1_DdVI4gOjkansc,19575 +transformers/models/glm4v_moe/modeling_glm4v_moe.py,sha256=j4fXG9UA4QpUC0hMRXRUy0xX--9pzsoLZueDomKzF1I,81357 
+transformers/models/glm4v_moe/modular_glm4v_moe.py,sha256=r1FUSTZQkuJt80hvTWMkeYLPXZJHz5l67WtkJmVevs0,21332 +transformers/models/glpn/__init__.py,sha256=YYoaugUj0un_FnfusrkzFfT_UtvUJEjMDaRDS8IcYAE,1073 +transformers/models/glpn/__pycache__/__init__.cpython-313.pyc,, +transformers/models/glpn/__pycache__/configuration_glpn.cpython-313.pyc,, +transformers/models/glpn/__pycache__/feature_extraction_glpn.cpython-313.pyc,, +transformers/models/glpn/__pycache__/image_processing_glpn.cpython-313.pyc,, +transformers/models/glpn/__pycache__/modeling_glpn.cpython-313.pyc,, +transformers/models/glpn/configuration_glpn.py,sha256=psEiatDZRceSeLe24Ch77es0_ugLEjzmzP81QthIXcI,5998 +transformers/models/glpn/feature_extraction_glpn.py,sha256=QC_SmxGijm3KyJtR_hEGG16TXPHpvv5pa5_0YrQLq0c,1284 +transformers/models/glpn/image_processing_glpn.py,sha256=R6BgbOIi9TBODIRK6Js_t6vC87JLbeWqLY-PS8N1fNI,12758 +transformers/models/glpn/modeling_glpn.py,sha256=I6mg9d1P9BL09ecueLTpGzh7UHJfWdXR_A528UtjM1M,29056 +transformers/models/got_ocr2/__init__.py,sha256=LBVZP8CBfOxaD9NLC2ZbZpLloHLIX7uDyM8m1-W2m6g,1138 +transformers/models/got_ocr2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/got_ocr2/__pycache__/configuration_got_ocr2.cpython-313.pyc,, +transformers/models/got_ocr2/__pycache__/image_processing_got_ocr2.cpython-313.pyc,, +transformers/models/got_ocr2/__pycache__/image_processing_got_ocr2_fast.cpython-313.pyc,, +transformers/models/got_ocr2/__pycache__/modeling_got_ocr2.cpython-313.pyc,, +transformers/models/got_ocr2/__pycache__/modular_got_ocr2.cpython-313.pyc,, +transformers/models/got_ocr2/__pycache__/processing_got_ocr2.cpython-313.pyc,, +transformers/models/got_ocr2/configuration_got_ocr2.py,sha256=n9a3boLFZN7HMCyFSrZhcZH-ceWk3_Ut9XJz1F_cEkI,9455 +transformers/models/got_ocr2/image_processing_got_ocr2.py,sha256=jixsvD8RfdHEtaoKtsfb4OQp-q8e4czkFrkwGQ61Xyk,25643 +transformers/models/got_ocr2/image_processing_got_ocr2_fast.py,sha256=vrmkfS9BNdkc4ZORCuMN9o9pNLFo5glRh4xGafduhvQ,10554 +transformers/models/got_ocr2/modeling_got_ocr2.py,sha256=OOVplKRGFyT4D48xPBW4HML3zFhaie_yF6AsKRAwDtA,36315 +transformers/models/got_ocr2/modular_got_ocr2.py,sha256=gjSNDrIoDUvCCKweW9--AAZHut2cuUvwgMLYwNbluhs,19508 +transformers/models/got_ocr2/processing_got_ocr2.py,sha256=UYg8mmvwvtU45wypto_azkPfxcPe-0uKBjwZabxxBB0,12579 +transformers/models/gpt2/__init__.py,sha256=NRi7aYu3gezDPsiXiiG6dgSpCMHSIvFpC3iI0w-JMA0,1182 +transformers/models/gpt2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/modeling_flax_gpt2.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/modeling_tf_gpt2.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_fast.cpython-313.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_tf.cpython-313.pyc,, +transformers/models/gpt2/configuration_gpt2.py,sha256=oWdrBVDmgPqQJ2orzzELbqkbE_hvKk9Op4pwtEXN3hY,12059 +transformers/models/gpt2/modeling_flax_gpt2.py,sha256=IBlHlVFZw-O_BArk5pqMJWk-wsTVNLQLef6MGNDlCRk,32109 +transformers/models/gpt2/modeling_gpt2.py,sha256=tZElPXrK3TljW79vn2_QqX1r5dfPkBB04tlZ_z5HjoA,74275 +transformers/models/gpt2/modeling_tf_gpt2.py,sha256=xWPpjaGyLDObGq4-3zwsb5gQVpuXUZ2ayTZRI4wLB2c,56440 +transformers/models/gpt2/tokenization_gpt2.py,sha256=3bLgxaap-6YpehzZI-DR5s0FU8PM0riJjsmaiqKH_3Q,13154 
+transformers/models/gpt2/tokenization_gpt2_fast.py,sha256=pKJ2PVaSzWUSIjXWoLp0r8LiIupthk_8oaTNhD-PhAw,5274 +transformers/models/gpt2/tokenization_gpt2_tf.py,sha256=Ma7Z4lkXDyeJeTw5-wwkJwttvIdUnGptzmnhmvzCX7A,4071 +transformers/models/gpt_bigcode/__init__.py,sha256=KQNb7PO57eZpP345wSbe_C3iL-N4VPscw1GY2mv81uE,1003 +transformers/models/gpt_bigcode/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gpt_bigcode/__pycache__/configuration_gpt_bigcode.cpython-313.pyc,, +transformers/models/gpt_bigcode/__pycache__/modeling_gpt_bigcode.cpython-313.pyc,, +transformers/models/gpt_bigcode/configuration_gpt_bigcode.py,sha256=5pL1meyCVQZXvco9WsIFNvDhEdtpAEVgTOg-xg2cWrw,6375 +transformers/models/gpt_bigcode/modeling_gpt_bigcode.py,sha256=kuYuy6KU1bimkJBHiMsVJCjlH6xBj_SdmdHp6_6Syrs,40349 +transformers/models/gpt_neo/__init__.py,sha256=b25qxianvucgAd3OxuI00Rr5324o-CRes0zrcEIOCZI,1036 +transformers/models/gpt_neo/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gpt_neo/__pycache__/configuration_gpt_neo.cpython-313.pyc,, +transformers/models/gpt_neo/__pycache__/modeling_flax_gpt_neo.cpython-313.pyc,, +transformers/models/gpt_neo/__pycache__/modeling_gpt_neo.cpython-313.pyc,, +transformers/models/gpt_neo/configuration_gpt_neo.py,sha256=zRidKD8M7zf-YbaDWUYw8ScjQesO5BSI6d7YoHnyjwU,11907 +transformers/models/gpt_neo/modeling_flax_gpt_neo.py,sha256=fD4XijeKuru5evmV7NcPQAtgvQha8H-oEyJm2uNlE4Y,28175 +transformers/models/gpt_neo/modeling_gpt_neo.py,sha256=rK-z9sg_2FL7nPhaMszUSr4ty4ycoiHW2v5u8jx5XNE,51567 +transformers/models/gpt_neox/__init__.py,sha256=6CL92CuqBTIDJ-YH_doFwb-oRylAffw7pwxedv3a-40,1043 +transformers/models/gpt_neox/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gpt_neox/__pycache__/configuration_gpt_neox.cpython-313.pyc,, +transformers/models/gpt_neox/__pycache__/modeling_gpt_neox.cpython-313.pyc,, +transformers/models/gpt_neox/__pycache__/modular_gpt_neox.cpython-313.pyc,, +transformers/models/gpt_neox/__pycache__/tokenization_gpt_neox_fast.cpython-313.pyc,, +transformers/models/gpt_neox/configuration_gpt_neox.py,sha256=-z9ztrlOAgyorPeZKTp-fOvSVWOoZuqhiAfq0VTBuNM,10982 +transformers/models/gpt_neox/modeling_gpt_neox.py,sha256=NAp7W-q95rVTHDVXozQCifWElm2Zxr9t8aBkCbZrDZg,34516 +transformers/models/gpt_neox/modular_gpt_neox.py,sha256=bwULKb0ZJKZPpA0k3UMZ_5ptMkIBADqZFbDU7JPzIw0,28636 +transformers/models/gpt_neox/tokenization_gpt_neox_fast.py,sha256=iivrluP4OTkQlBQjqz1kpn0NhIrarqPafDbCgHGc80o,8986 +transformers/models/gpt_neox_japanese/__init__.py,sha256=z4kbUmZSjE-Hs9ba8ul3Yncc9ZJy7ePufbwwRlfqWqw,1065 +transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/configuration_gpt_neox_japanese.cpython-313.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/modeling_gpt_neox_japanese.cpython-313.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/tokenization_gpt_neox_japanese.cpython-313.pyc,, +transformers/models/gpt_neox_japanese/configuration_gpt_neox_japanese.py,sha256=Mae05uoCqq3q20e-da1CoCxCegR_Ng6q5R-1hrcacVI,9122 +transformers/models/gpt_neox_japanese/modeling_gpt_neox_japanese.py,sha256=hOPyMK07om85VSpo7jGnuBwfSB-eKZZooRFzJZKGNgc,32605 +transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py,sha256=kskjAVPuG8Xb6zm2dwFgi-0BeGz_KZOJUJD-ZdKOQ2o,16938 +transformers/models/gpt_oss/__init__.py,sha256=a3dnVKgP6RwbuxBJW3kodYKj8oVF5Y6pLJixMthP1yA,995 +transformers/models/gpt_oss/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/gpt_oss/__pycache__/configuration_gpt_oss.cpython-313.pyc,, +transformers/models/gpt_oss/__pycache__/modeling_gpt_oss.cpython-313.pyc,, +transformers/models/gpt_oss/__pycache__/modular_gpt_oss.cpython-313.pyc,, +transformers/models/gpt_oss/configuration_gpt_oss.py,sha256=K-rizkmb7ItV8Z-tzUOIT7DZ47StGkd0osj8XygKOhk,4881 +transformers/models/gpt_oss/modeling_gpt_oss.py,sha256=8EuOY2D3kzB7AzxnHfrukLS-KV0-HjBsYyoJQQvzEeE,32532 +transformers/models/gpt_oss/modular_gpt_oss.py,sha256=nFXME4Ncvu_GiQIzNcjSjK1FSgbcG2IyTXgOx_KbsmU,20472 +transformers/models/gpt_sw3/__init__.py,sha256=-g6WlJ6EhhrJKCCsPf78cgvGD7oWvfeW9GBGBpW6wcM,958 +transformers/models/gpt_sw3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gpt_sw3/__pycache__/tokenization_gpt_sw3.cpython-313.pyc,, +transformers/models/gpt_sw3/tokenization_gpt_sw3.py,sha256=6z6Yd9eLqFgEKb_Z4ow8kFOuhZVTD0ejrboc9aktNIc,12565 +transformers/models/gptj/__init__.py,sha256=rgFDJcsxcq1ytl7BTZthr7sSmaxqggSbvrIseycmE-s,1063 +transformers/models/gptj/__pycache__/__init__.cpython-313.pyc,, +transformers/models/gptj/__pycache__/configuration_gptj.cpython-313.pyc,, +transformers/models/gptj/__pycache__/modeling_flax_gptj.cpython-313.pyc,, +transformers/models/gptj/__pycache__/modeling_gptj.cpython-313.pyc,, +transformers/models/gptj/__pycache__/modeling_tf_gptj.cpython-313.pyc,, +transformers/models/gptj/configuration_gptj.py,sha256=wJU2oz2LYuleopQEzA2soQauHwae7JinCxJi_hGz2YM,8860 +transformers/models/gptj/modeling_flax_gptj.py,sha256=zod4lQZEi_H8sy4zbtmb2Gn5mEiFSmPZjYbffpztX_8,28620 +transformers/models/gptj/modeling_gptj.py,sha256=L-pUs5-6lk4sdRa71m4g7_MpbAb-VAQF0AIN2OW4Jks,53912 +transformers/models/gptj/modeling_tf_gptj.py,sha256=RHLHR65JSAH9IC-Rp3ZUVZKFkJfahNJZzlVwW--HMxE,47831 +transformers/models/granite/__init__.py,sha256=cDxmZNuphkDCs2U8W5C95Vhu577kdZHKHUWWaQ3vk5U,1015 +transformers/models/granite/__pycache__/__init__.cpython-313.pyc,, +transformers/models/granite/__pycache__/configuration_granite.cpython-313.pyc,, +transformers/models/granite/__pycache__/modeling_granite.cpython-313.pyc,, +transformers/models/granite/__pycache__/modular_granite.cpython-313.pyc,, +transformers/models/granite/configuration_granite.py,sha256=U1CQ2gvTYGx753UX0t5IOfcLllikDU7Jupj7NlOX3Dk,9348 +transformers/models/granite/modeling_granite.py,sha256=7bcOB4DRbrXkc98kTLkVPpbwH-SUQBDnhPzRy8WgtUI,24992 +transformers/models/granite/modular_granite.py,sha256=7Rcs_VTOj0bQO9HAQDuVUVxyk7UXOo71MtUmZTxorOU,11884 +transformers/models/granite_speech/__init__.py,sha256=xD_zbTTnBiaB6EEG4yinaWd-yza1waa01GNKVhsGL1M,1107 +transformers/models/granite_speech/__pycache__/__init__.cpython-313.pyc,, +transformers/models/granite_speech/__pycache__/configuration_granite_speech.cpython-313.pyc,, +transformers/models/granite_speech/__pycache__/feature_extraction_granite_speech.cpython-313.pyc,, +transformers/models/granite_speech/__pycache__/modeling_granite_speech.cpython-313.pyc,, +transformers/models/granite_speech/__pycache__/processing_granite_speech.cpython-313.pyc,, +transformers/models/granite_speech/configuration_granite_speech.py,sha256=EYXvGbPHGa6rTZa4FeeR-kg98I_gW9JTiI1MT8XyZwg,8565 +transformers/models/granite_speech/feature_extraction_granite_speech.py,sha256=CrQoQlWFlY3B3vM0mDdZCFM8t_xp21vjGieZl65gErE,7395 +transformers/models/granite_speech/modeling_granite_speech.py,sha256=_6EwamwcEOr9vWVPENvllVH0YZLyUzmRMbPLQ0I8Lv0,26533 +transformers/models/granite_speech/processing_granite_speech.py,sha256=pMT_ByZPVZC3xBkCfFoW8CSLkY-uMdNW_4ufPbTxFP0,3922 
+transformers/models/granitemoe/__init__.py,sha256=e4KKtNT7YFkYkPBfcS0VyhpT_1vF0JkR2qdYKPqRUcE,1001 +transformers/models/granitemoe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/granitemoe/__pycache__/configuration_granitemoe.cpython-313.pyc,, +transformers/models/granitemoe/__pycache__/modeling_granitemoe.cpython-313.pyc,, +transformers/models/granitemoe/configuration_granitemoe.py,sha256=bZMMl3W8IDz9VfbN98Y39K12K7C2Mm0D1-41vngyxfU,9513 +transformers/models/granitemoe/modeling_granitemoe.py,sha256=a9iCnweQKgYRaND2AHdLlfypOCqp4QKmwd7zf5W2FO0,44603 +transformers/models/granitemoehybrid/__init__.py,sha256=yiZusdNxb3DK3MNKdwcVNM2bFfeASr76tKQwQwmSJ68,1043 +transformers/models/granitemoehybrid/__pycache__/__init__.cpython-313.pyc,, +transformers/models/granitemoehybrid/__pycache__/configuration_granitemoehybrid.cpython-313.pyc,, +transformers/models/granitemoehybrid/__pycache__/modeling_granitemoehybrid.cpython-313.pyc,, +transformers/models/granitemoehybrid/__pycache__/modular_granitemoehybrid.cpython-313.pyc,, +transformers/models/granitemoehybrid/configuration_granitemoehybrid.py,sha256=PcoscInOieZzcB828m2LxvXuTtyp7zzw3QDS349nzr8,12558 +transformers/models/granitemoehybrid/modeling_granitemoehybrid.py,sha256=laEl9ygG9n1WwlD5x0WEskWqwOEx38umE8YE8TtZLwY,84423 +transformers/models/granitemoehybrid/modular_granitemoehybrid.py,sha256=YnzzsBOwhDHlvy4350oMR4RGa_ihOgYatE0y3mir1f4,16941 +transformers/models/granitemoeshared/__init__.py,sha256=vmY98tLts1c_yvkLn9X-xk6CFtXIKskzYvFGMqQAskc,1013 +transformers/models/granitemoeshared/__pycache__/__init__.cpython-313.pyc,, +transformers/models/granitemoeshared/__pycache__/configuration_granitemoeshared.cpython-313.pyc,, +transformers/models/granitemoeshared/__pycache__/modeling_granitemoeshared.cpython-313.pyc,, +transformers/models/granitemoeshared/__pycache__/modular_granitemoeshared.cpython-313.pyc,, +transformers/models/granitemoeshared/configuration_granitemoeshared.py,sha256=lYbrzbYRHxR5Xf8xb_7GVlw3NAkWGaMe_cVq8H6jIH8,9942 +transformers/models/granitemoeshared/modeling_granitemoeshared.py,sha256=Yqc7_xAcGBilqx1yvn1bdTnmE9UNS3unUjfeF1biNII,46993 +transformers/models/granitemoeshared/modular_granitemoeshared.py,sha256=fU0NhBHvcP_bfBvGRnvpIXXtE655p3UZPgGrEP-c9Q4,8093 +transformers/models/grounding_dino/__init__.py,sha256=nTxZfZioCpS8hj_L80qZQkgPviMZrTxkz14B9sQQJjk,1161 +transformers/models/grounding_dino/__pycache__/__init__.cpython-313.pyc,, +transformers/models/grounding_dino/__pycache__/configuration_grounding_dino.cpython-313.pyc,, +transformers/models/grounding_dino/__pycache__/image_processing_grounding_dino.cpython-313.pyc,, +transformers/models/grounding_dino/__pycache__/image_processing_grounding_dino_fast.cpython-313.pyc,, +transformers/models/grounding_dino/__pycache__/modeling_grounding_dino.cpython-313.pyc,, +transformers/models/grounding_dino/__pycache__/modular_grounding_dino.cpython-313.pyc,, +transformers/models/grounding_dino/__pycache__/processing_grounding_dino.cpython-313.pyc,, +transformers/models/grounding_dino/configuration_grounding_dino.py,sha256=v0jseFxOwfmdOxes3oL75e-GmIdjIEngnJdMNPCES_Q,15246 +transformers/models/grounding_dino/image_processing_grounding_dino.py,sha256=O_FW33pwjUIoIhrbtflKqLoBYGrJDbpPWvq7eObgxZw,72276 +transformers/models/grounding_dino/image_processing_grounding_dino_fast.py,sha256=WNYZo9mYCczm-ABP-6CRUR0s9YrApspPH5O5GFiuCZY,33714 +transformers/models/grounding_dino/modeling_grounding_dino.py,sha256=upjrUqy9in42urWB_Q-bCS_tNXnjfNbmnFXqFTUEsuA,130668 
+transformers/models/grounding_dino/modular_grounding_dino.py,sha256=jxpSF3pY9G5a8apSE60KxLFOksSQ1HejZqsO_L_6M6k,5261 +transformers/models/grounding_dino/processing_grounding_dino.py,sha256=ByBNG7-vmk-Ok2fHswOIPxmFDb6pyY9FMCNxxPAAojk,12065 +transformers/models/groupvit/__init__.py,sha256=vrJ-tBa1XOd1CloHhXKMCIlggMxOS4M7jCcqlLQxMo4,1037 +transformers/models/groupvit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/groupvit/__pycache__/configuration_groupvit.cpython-313.pyc,, +transformers/models/groupvit/__pycache__/modeling_groupvit.cpython-313.pyc,, +transformers/models/groupvit/__pycache__/modeling_tf_groupvit.cpython-313.pyc,, +transformers/models/groupvit/configuration_groupvit.py,sha256=HPn7QhjrbFnZvchmlNN_ONEW4Y3pDBlmHuMMk7OgrVs,18672 +transformers/models/groupvit/modeling_groupvit.py,sha256=ts2BpfArCnK63M8izxOw6aeqhRlW7UWgEEIulXUkHw0,60051 +transformers/models/groupvit/modeling_tf_groupvit.py,sha256=EUsn4bjKnjVuOlloMYViw5ZPWRrt7SkPcjcUi9Di6Nw,90065 +transformers/models/helium/__init__.py,sha256=b1Senw5Mr129rzZSd1sW6-Ies2kIAUHfplpzgGeuTFE,993 +transformers/models/helium/__pycache__/__init__.cpython-313.pyc,, +transformers/models/helium/__pycache__/configuration_helium.cpython-313.pyc,, +transformers/models/helium/__pycache__/modeling_helium.cpython-313.pyc,, +transformers/models/helium/__pycache__/modular_helium.cpython-313.pyc,, +transformers/models/helium/configuration_helium.py,sha256=kctXqQTceihfsqRx0vImc_urU3Ii5hyp-cUGy8TUT0E,7380 +transformers/models/helium/modeling_helium.py,sha256=roGKcoeg_DHPqeMkRBEwleq1NPn96gNc24S4jWl6q0g,20910 +transformers/models/helium/modular_helium.py,sha256=Vy5H8kuVy5fJM0JQb0fEGJ4l6VrAbn9FfPiw2B0S9JA,5471 +transformers/models/herbert/__init__.py,sha256=3i5hlRANc-OFP86y2qzb_OCWVjJQ9XQswiglh5KbU7Y,1003 +transformers/models/herbert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert.cpython-313.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert_fast.cpython-313.pyc,, +transformers/models/herbert/tokenization_herbert.py,sha256=-1q4wQyllXCSydUXL0DO8Nm9OqmAcIGdNhCvgb6rxeg,23808 +transformers/models/herbert/tokenization_herbert_fast.py,sha256=S_47DZCpq7SK9d21QKf8jF5FZvVkRFERzD_iRV2IMg0,4919 +transformers/models/hgnet_v2/__init__.py,sha256=sBFNC0RNpS-oEnOiwtxy2SkUPAJgmI5uXXq2WjSHRd8,999 +transformers/models/hgnet_v2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/hgnet_v2/__pycache__/configuration_hgnet_v2.cpython-313.pyc,, +transformers/models/hgnet_v2/__pycache__/modeling_hgnet_v2.cpython-313.pyc,, +transformers/models/hgnet_v2/__pycache__/modular_hgnet_v2.cpython-313.pyc,, +transformers/models/hgnet_v2/configuration_hgnet_v2.py,sha256=Gr8N48fXLivF_jKgpm62-1mTvqo5QDfw-h0JceCXbjY,8823 +transformers/models/hgnet_v2/modeling_hgnet_v2.py,sha256=GPTGNKK5yPUV8AjQ-kAX-ynFK3uiSPaJdYo7fe-bfTA,17918 +transformers/models/hgnet_v2/modular_hgnet_v2.py,sha256=jC6wqnU6DZsLxf9UHsLgfUyN-3OXdVeUp5BXNAF9zbw,24358 +transformers/models/hiera/__init__.py,sha256=b1kwKtpZVISJZ5Pri421uvH2v3IoRQ6XXHzxFOPHN-g,991 +transformers/models/hiera/__pycache__/__init__.cpython-313.pyc,, +transformers/models/hiera/__pycache__/configuration_hiera.cpython-313.pyc,, +transformers/models/hiera/__pycache__/modeling_hiera.cpython-313.pyc,, +transformers/models/hiera/configuration_hiera.py,sha256=QbF2S73pDapMC0_AoQVnPZTqWgs0tXvWWgSAvjNxEFE,9319 +transformers/models/hiera/modeling_hiera.py,sha256=SgBXF5NmvHHNLxlNV5Cihi-FzuUl0jjtHTbjgA9RvVk,61836 
+transformers/models/hubert/__init__.py,sha256=ai560JtgkksShocy0zcDejelkRZnK4IZPVKaTHCOxPQ,1031 +transformers/models/hubert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/hubert/__pycache__/configuration_hubert.cpython-313.pyc,, +transformers/models/hubert/__pycache__/modeling_hubert.cpython-313.pyc,, +transformers/models/hubert/__pycache__/modeling_tf_hubert.cpython-313.pyc,, +transformers/models/hubert/__pycache__/modular_hubert.cpython-313.pyc,, +transformers/models/hubert/configuration_hubert.py,sha256=XFh70tUL-ITYtn-RMt-lZl_Ej2qQ24vJLZZyBYF3PwA,14962 +transformers/models/hubert/modeling_hubert.py,sha256=D0WYbO8xGUHMYntJ7ZNyObytNc4DosDc_A3E73ByYgM,54623 +transformers/models/hubert/modeling_tf_hubert.py,sha256=kNlAqimDX_Tkw4mcrTNa1hByvAZEnsQFs_RhdGW7txg,70566 +transformers/models/hubert/modular_hubert.py,sha256=fhDox-l7V90dmRmOWoJ0bGwhkJZHNmEzAlPLP5BdD-E,11949 +transformers/models/hunyuan_v1_dense/__init__.py,sha256=FZn7rOtjmLA5vwEeH99QcgAveRMl1LbHQOyBKhywPWY,442 +transformers/models/hunyuan_v1_dense/__pycache__/__init__.cpython-313.pyc,, +transformers/models/hunyuan_v1_dense/__pycache__/configuration_hunyuan_v1_dense.cpython-313.pyc,, +transformers/models/hunyuan_v1_dense/__pycache__/modeling_hunyuan_v1_dense.cpython-313.pyc,, +transformers/models/hunyuan_v1_dense/__pycache__/modular_hunyuan_v1_dense.cpython-313.pyc,, +transformers/models/hunyuan_v1_dense/configuration_hunyuan_v1_dense.py,sha256=xfweGl6-PcYJ996VgbIznOxQn4BwrFvQ9hxrtJpNLWM,9921 +transformers/models/hunyuan_v1_dense/modeling_hunyuan_v1_dense.py,sha256=M76pNiIO60221GenOf8SjFs0SlZvTKiUCY7mc-tZf3Q,22512 +transformers/models/hunyuan_v1_dense/modular_hunyuan_v1_dense.py,sha256=vgjtOAYjljo1drPQgDFbdu62I2-CSpjqrPiFNkvosGE,7610 +transformers/models/hunyuan_v1_moe/__init__.py,sha256=SrgoAMLn102HlTL_4fm7CwAFeFSlAWDvFGZcUU2S_9o,396 +transformers/models/hunyuan_v1_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/hunyuan_v1_moe/__pycache__/configuration_hunyuan_v1_moe.cpython-313.pyc,, +transformers/models/hunyuan_v1_moe/__pycache__/modeling_hunyuan_v1_moe.cpython-313.pyc,, +transformers/models/hunyuan_v1_moe/__pycache__/modular_hunyuan_v1_moe.cpython-313.pyc,, +transformers/models/hunyuan_v1_moe/configuration_hunyuan_v1_moe.py,sha256=IQAJOMECCtBk224-2O9W3enCksVQluXIAW7Gu0sX6JQ,10694 +transformers/models/hunyuan_v1_moe/modeling_hunyuan_v1_moe.py,sha256=lKHVeuPciyzOQ5GSin2Dstu_yFhKo_76t8XLTTP2aJU,26384 +transformers/models/hunyuan_v1_moe/modular_hunyuan_v1_moe.py,sha256=uggfllo2UDdPc9dFeRzTGeowk_w9wAYULnbOvWgpFe8,11943 +transformers/models/ibert/__init__.py,sha256=UMTcE54y6O9UNF8l9VV2rrTlJSAHooxeNeHNzPSgr_E,991 +transformers/models/ibert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ibert/__pycache__/configuration_ibert.cpython-313.pyc,, +transformers/models/ibert/__pycache__/modeling_ibert.cpython-313.pyc,, +transformers/models/ibert/__pycache__/quant_modules.cpython-313.pyc,, +transformers/models/ibert/configuration_ibert.py,sha256=nLRgpOzXz8rNprkGfnz5LVuPKWbYnQfkAubJp9sJYyE,7120 +transformers/models/ibert/modeling_ibert.py,sha256=YsclA8wKcXrtT1FoAMDFKiKzX5H5z63U2gizYnHxMGM,51496 +transformers/models/ibert/quant_modules.py,sha256=IRq4JOfDn8BBDan2zDy8Fa70bMJ8Wa2gorNDeNVB6uc,30076 +transformers/models/idefics/__init__.py,sha256=zc4m1Vd6-Szs7Urt0Ry6eUScpza8iD-QPG4cq4xX34g,1116 +transformers/models/idefics/__pycache__/__init__.cpython-313.pyc,, +transformers/models/idefics/__pycache__/configuration_idefics.cpython-313.pyc,, 
+transformers/models/idefics/__pycache__/image_processing_idefics.cpython-313.pyc,, +transformers/models/idefics/__pycache__/modeling_idefics.cpython-313.pyc,, +transformers/models/idefics/__pycache__/modeling_tf_idefics.cpython-313.pyc,, +transformers/models/idefics/__pycache__/perceiver.cpython-313.pyc,, +transformers/models/idefics/__pycache__/perceiver_tf.cpython-313.pyc,, +transformers/models/idefics/__pycache__/processing_idefics.cpython-313.pyc,, +transformers/models/idefics/__pycache__/vision.cpython-313.pyc,, +transformers/models/idefics/__pycache__/vision_tf.cpython-313.pyc,, +transformers/models/idefics/configuration_idefics.py,sha256=4j7sAul74adsu3fXPiq34FePCqJJaafCg2dmHU9h_GU,15304 +transformers/models/idefics/image_processing_idefics.py,sha256=2UEHQpVQJQZZPoXrwfrhktyiMn-6dfk6PJj51z0R5Eo,9275 +transformers/models/idefics/modeling_idefics.py,sha256=30PnXwg3M0euNdR6ILCARkUa-A2i-YD8iQw7nJJmU4M,58085 +transformers/models/idefics/modeling_tf_idefics.py,sha256=zEU3dTNxi-2GEeBmDznquODfD__z89bfbB1DfxCByUc,79225 +transformers/models/idefics/perceiver.py,sha256=MkJ34X4dgVNJddcn8wUWyDf0rTioVl4WG3dP5GLXR0Q,9426 +transformers/models/idefics/perceiver_tf.py,sha256=XGRP3FaYcbHbxQa9_NoaLkipFfy9tiymgfx2w1GBT6E,9999 +transformers/models/idefics/processing_idefics.py,sha256=dCqIMvu9voJJ7V4MoblAPCG-yWv8pUPPW2MCZSQXusI,22853 +transformers/models/idefics/vision.py,sha256=oD7jZ2mG07Vv9BboEQmd0IdXR5_Q2uJIQcR5oRhj7I8,21333 +transformers/models/idefics/vision_tf.py,sha256=op9JJArkQWMFNG5_7JoAsXPSWQOyQWmXHiztp8LxCm0,26014 +transformers/models/idefics2/__init__.py,sha256=YmU2OQi-BTXESv52a4jtTwWC2ingparNU4-rXVCPWzQ,1131 +transformers/models/idefics2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/idefics2/__pycache__/configuration_idefics2.cpython-313.pyc,, +transformers/models/idefics2/__pycache__/image_processing_idefics2.cpython-313.pyc,, +transformers/models/idefics2/__pycache__/image_processing_idefics2_fast.cpython-313.pyc,, +transformers/models/idefics2/__pycache__/modeling_idefics2.cpython-313.pyc,, +transformers/models/idefics2/__pycache__/processing_idefics2.cpython-313.pyc,, +transformers/models/idefics2/configuration_idefics2.py,sha256=TUpFg-9WZ_ISWYfKXlVXT3Z5oTh0gxQ4BY-4fbHcRf0,12223 +transformers/models/idefics2/image_processing_idefics2.py,sha256=VY_xZAxXs9pC5jSprzmuPTLEjl5UknWJV34S8I-uRMw,26799 +transformers/models/idefics2/image_processing_idefics2_fast.py,sha256=dr7JFBogQZylKaYHcaVYnzvuJ4m1jm9kHYPEQo6L6_Y,12013 +transformers/models/idefics2/modeling_idefics2.py,sha256=gu5iCM5tf_47F_vX3M8oGAgeZDa1QUqke6d94unENm8,52929 +transformers/models/idefics2/processing_idefics2.py,sha256=8GQM4QkauSzB9zbzAvW01FcTvV6CP9jacVEy7A6vGBs,11805 +transformers/models/idefics3/__init__.py,sha256=zLsOtUFi074lvfGbwZEMVSvV08TZgoq0DVhJJagYoRo,1131 +transformers/models/idefics3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/idefics3/__pycache__/configuration_idefics3.cpython-313.pyc,, +transformers/models/idefics3/__pycache__/image_processing_idefics3.cpython-313.pyc,, +transformers/models/idefics3/__pycache__/image_processing_idefics3_fast.cpython-313.pyc,, +transformers/models/idefics3/__pycache__/modeling_idefics3.cpython-313.pyc,, +transformers/models/idefics3/__pycache__/processing_idefics3.cpython-313.pyc,, +transformers/models/idefics3/configuration_idefics3.py,sha256=fQYZ0Eo-q7InVjNFat2RjL9RZQmR-9nAIlK9v_XNkZE,8566 +transformers/models/idefics3/image_processing_idefics3.py,sha256=pXbyNYn-0BZ8OAfWk8okyFXgPocjMv_DFDwciMxaIto,44086 
+transformers/models/idefics3/image_processing_idefics3_fast.py,sha256=r87nPbM_owfX-fyc1e1dpm-zEDCd7lvgpqiymOLnTV0,23465 +transformers/models/idefics3/modeling_idefics3.py,sha256=rQn2A99-xl1Tzovk8yr3dMSpi8Dkiy3Uz4nePot2mHA,43996 +transformers/models/idefics3/processing_idefics3.py,sha256=3Hq7gNzpiXJh8Qi5B1iiyEPzji93WTfGF7OmbPJgaMU,18766 +transformers/models/ijepa/__init__.py,sha256=O0_Jqpy8kmorYC-x0QsoMYSHdqQt3E1j-UZGLQ9aCv0,991 +transformers/models/ijepa/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ijepa/__pycache__/configuration_ijepa.cpython-313.pyc,, +transformers/models/ijepa/__pycache__/modeling_ijepa.cpython-313.pyc,, +transformers/models/ijepa/__pycache__/modular_ijepa.cpython-313.pyc,, +transformers/models/ijepa/configuration_ijepa.py,sha256=8lO360USWRUnrnBXO2SeiZN0ozKHJNb2K2D0_vCKeX8,5445 +transformers/models/ijepa/modeling_ijepa.py,sha256=XZkLqvCBigXntyBuRMiwxkqyNxdCJ4mXs5FAmwB-qjA,23527 +transformers/models/ijepa/modular_ijepa.py,sha256=YA9MMAQN4WEMJBuwI8vN46HS8cz9PjyMRRryx_iT7WI,7621 +transformers/models/imagegpt/__init__.py,sha256=NIc1rPRLS3e06c-xUW5Vk80IOSPK2L1ZrY_PJrHn1GY,1139 +transformers/models/imagegpt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/imagegpt/__pycache__/configuration_imagegpt.cpython-313.pyc,, +transformers/models/imagegpt/__pycache__/feature_extraction_imagegpt.cpython-313.pyc,, +transformers/models/imagegpt/__pycache__/image_processing_imagegpt.cpython-313.pyc,, +transformers/models/imagegpt/__pycache__/image_processing_imagegpt_fast.cpython-313.pyc,, +transformers/models/imagegpt/__pycache__/modeling_imagegpt.cpython-313.pyc,, +transformers/models/imagegpt/configuration_imagegpt.py,sha256=bp6I42shNZUoPmwpTOWEiyUH3-UQkDzK7AkSLgsMZCo,8799 +transformers/models/imagegpt/feature_extraction_imagegpt.py,sha256=sU7HaHR9bGhzHYLuRDnvcHRCnxlJHkfTVItQYn7ZS5E,1316 +transformers/models/imagegpt/image_processing_imagegpt.py,sha256=SSJt-s0ozRQZzvwp4D94v59meSoirNEr1MOPveajDV4,15044 +transformers/models/imagegpt/image_processing_imagegpt_fast.py,sha256=eeOL5tDmrD2CdVsvMcWVgOacORGkQtdIOCpz-Q_uhps,8185 +transformers/models/imagegpt/modeling_imagegpt.py,sha256=mS8TemFYe-PZln9te9xkx-23NpNGB6QGaD52Ah9U6mM,45860 +transformers/models/informer/__init__.py,sha256=L-BwVQfdq5ve06VJJ-OnTh-m_YqSMNcpDQ1z6sbDtNI,997 +transformers/models/informer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/informer/__pycache__/configuration_informer.cpython-313.pyc,, +transformers/models/informer/__pycache__/modeling_informer.cpython-313.pyc,, +transformers/models/informer/__pycache__/modular_informer.cpython-313.pyc,, +transformers/models/informer/configuration_informer.py,sha256=w_Si5k5ZakGLXANemPDBY7RX_j3rfcLQr_NawdVURcA,12488 +transformers/models/informer/modeling_informer.py,sha256=bdoc9Sl4FKfLt5U0jlBM3yPzGmAHcm67JKEblvgIsvQ,107068 +transformers/models/informer/modular_informer.py,sha256=mKtONM7e6FcHRYXaBZMhfPaSAKfl8qewSGHKK6jVkug,48642 +transformers/models/instructblip/__init__.py,sha256=gI7F0N1dRSYdZtTumtuoPcIJcuBI8PO4DEOQS4_nWuc,1048 +transformers/models/instructblip/__pycache__/__init__.cpython-313.pyc,, +transformers/models/instructblip/__pycache__/configuration_instructblip.cpython-313.pyc,, +transformers/models/instructblip/__pycache__/modeling_instructblip.cpython-313.pyc,, +transformers/models/instructblip/__pycache__/processing_instructblip.cpython-313.pyc,, +transformers/models/instructblip/configuration_instructblip.py,sha256=_EYzBIHXuM6DThV5EfpJVuglsL7OCu-axvN0u_Yfk2M,15823 
+transformers/models/instructblip/modeling_instructblip.py,sha256=lcYCpwQcDEDMrw0zAHW9AN1_YK3mtuWLnM2MvmOKMIE,67654 +transformers/models/instructblip/processing_instructblip.py,sha256=vXnmWgHSp-zqIRtBe-Uc7VAlAKwPkykcG529dGqeeeQ,8846 +transformers/models/instructblipvideo/__init__.py,sha256=sgK7MEwrqKB6mQyEvhxcgOQc_OAtMDc9tAZqKF0sxfM,1171 +transformers/models/instructblipvideo/__pycache__/__init__.cpython-313.pyc,, +transformers/models/instructblipvideo/__pycache__/configuration_instructblipvideo.cpython-313.pyc,, +transformers/models/instructblipvideo/__pycache__/image_processing_instructblipvideo.cpython-313.pyc,, +transformers/models/instructblipvideo/__pycache__/modeling_instructblipvideo.cpython-313.pyc,, +transformers/models/instructblipvideo/__pycache__/modular_instructblipvideo.cpython-313.pyc,, +transformers/models/instructblipvideo/__pycache__/processing_instructblipvideo.cpython-313.pyc,, +transformers/models/instructblipvideo/__pycache__/video_processing_instructblipvideo.cpython-313.pyc,, +transformers/models/instructblipvideo/configuration_instructblipvideo.py,sha256=RWJu0fOE-UcvZRLYLCZB0MHmXOUReRv3YhN9lsliQ68,16931 +transformers/models/instructblipvideo/image_processing_instructblipvideo.py,sha256=APQbLGFQG66fFLgDXa9pLPCRTnebrcNdd7M6gEN8tm4,17072 +transformers/models/instructblipvideo/modeling_instructblipvideo.py,sha256=q05JQg0aznMw9XbqIJszoqTW2c2mlmQLE5DCQYxSwtM,71033 +transformers/models/instructblipvideo/modular_instructblipvideo.py,sha256=TNW5LRAom0_qSh4I-rRRgOGu45GWeu2i9Zw5LGIf1qQ,27290 +transformers/models/instructblipvideo/processing_instructblipvideo.py,sha256=tcQHwvKN5MZNnx5WZ5J4LIJOdT6MTnwCPlG7PoZ_yB4,9771 +transformers/models/instructblipvideo/video_processing_instructblipvideo.py,sha256=BlJwdsPnik5G6gHdDOi9WFmEF6LcaWCJ-HGZ88WLv8k,4069 +transformers/models/internvl/__init__.py,sha256=tNXeZ8TIWlY70CelRiihyPOudKQtRBZp-c9WqglJ8ss,1081 +transformers/models/internvl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/internvl/__pycache__/configuration_internvl.cpython-313.pyc,, +transformers/models/internvl/__pycache__/modeling_internvl.cpython-313.pyc,, +transformers/models/internvl/__pycache__/modular_internvl.cpython-313.pyc,, +transformers/models/internvl/__pycache__/processing_internvl.cpython-313.pyc,, +transformers/models/internvl/__pycache__/video_processing_internvl.cpython-313.pyc,, +transformers/models/internvl/configuration_internvl.py,sha256=pHPwAbDBs-a2mCoadTGA-86y8w9io-9rn-mhFSMFe20,10622 +transformers/models/internvl/modeling_internvl.py,sha256=yByI-LFolfe3vpdhFnk_e1d4K3My8zKPMZ7B3iMKJ8M,39332 +transformers/models/internvl/modular_internvl.py,sha256=VBEh6V5CXxZzoZxm6GrhQIlxRz_YJCvZ3IF3aU2CG2I,26002 +transformers/models/internvl/processing_internvl.py,sha256=nFaEAvLfcx6uAUpGUeh8Z60j4nnex98Q7hTvO6DJZFY,15988 +transformers/models/internvl/video_processing_internvl.py,sha256=0NdBoCThQVnypaAOEsTm_p7r3gw-ni8aRXGoXnUQdlk,6599 +transformers/models/jamba/__init__.py,sha256=zN7Rmr--d5GCEJzMA7gxIz-BYFydPN3cyuif85YU0Fk,991 +transformers/models/jamba/__pycache__/__init__.cpython-313.pyc,, +transformers/models/jamba/__pycache__/configuration_jamba.cpython-313.pyc,, +transformers/models/jamba/__pycache__/modeling_jamba.cpython-313.pyc,, +transformers/models/jamba/configuration_jamba.py,sha256=_tAnWFB7DxGv4MF3dq2rSmzJ3Ys-8jamkEtN67mTPWQ,11745 +transformers/models/jamba/modeling_jamba.py,sha256=5Zeb2E0Hblb06w0E1c661KQgXOhNfmMakwSJxdGcSIM,68890 +transformers/models/janus/__init__.py,sha256=rTnJnHMmmoPxIVasip4sdS5aZ1wvetOtZUobIMYMHX4,1132 
+transformers/models/janus/__pycache__/__init__.cpython-313.pyc,, +transformers/models/janus/__pycache__/configuration_janus.cpython-313.pyc,, +transformers/models/janus/__pycache__/image_processing_janus.cpython-313.pyc,, +transformers/models/janus/__pycache__/image_processing_janus_fast.cpython-313.pyc,, +transformers/models/janus/__pycache__/modeling_janus.cpython-313.pyc,, +transformers/models/janus/__pycache__/modular_janus.cpython-313.pyc,, +transformers/models/janus/__pycache__/processing_janus.cpython-313.pyc,, +transformers/models/janus/configuration_janus.py,sha256=7fL58_kFc-hWwhpq-6N_BJG82194A3uHz9QEmRK0A14,14908 +transformers/models/janus/image_processing_janus.py,sha256=MCGyQXhTCKI8i7azs9FyFF20sTG4CNvDvGKzSSM_i8g,26231 +transformers/models/janus/image_processing_janus_fast.py,sha256=QBOCNYnicSmhm55GqydGT7V2iRLvQLnThVnHjlPkcr0,9104 +transformers/models/janus/modeling_janus.py,sha256=GMJewKE6i7xtCe2pAgiCp52Hzyx0Pg9JWplp2MqojEc,61915 +transformers/models/janus/modular_janus.py,sha256=rEVkWNIlW8IsMeZyQRQlU0WzBrfIIjg0y96BRblS2ZI,77517 +transformers/models/janus/processing_janus.py,sha256=5JEIn8wvEtNh7DZJoWTXqAQrTXp2Ngb05DinunZNn7o,7538 +transformers/models/jetmoe/__init__.py,sha256=zhqtP2ZDCCl3Fp3VBnnuaA044Ztbh7fsUKogAKABOt0,993 +transformers/models/jetmoe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/jetmoe/__pycache__/configuration_jetmoe.cpython-313.pyc,, +transformers/models/jetmoe/__pycache__/modeling_jetmoe.cpython-313.pyc,, +transformers/models/jetmoe/configuration_jetmoe.py,sha256=65oV7AlRZZEsz_4BlbWEf4DvdyqywSjew9KjE3Uzj_U,6851 +transformers/models/jetmoe/modeling_jetmoe.py,sha256=c-7YbteDWnb4QcfdbaxGLME8UR9QwImY9aVXVckFDbU,53752 +transformers/models/kosmos2/__init__.py,sha256=Ow8cLelhxl6fm5XvXzNQtPLt1xjIdVmGUwz5NoVVVto,1033 +transformers/models/kosmos2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/kosmos2/__pycache__/configuration_kosmos2.cpython-313.pyc,, +transformers/models/kosmos2/__pycache__/modeling_kosmos2.cpython-313.pyc,, +transformers/models/kosmos2/__pycache__/processing_kosmos2.cpython-313.pyc,, +transformers/models/kosmos2/configuration_kosmos2.py,sha256=J8MCJ8SzDJ03_8DiSMNiYyU46i0tl6LEc2K_u1dnUY8,11888 +transformers/models/kosmos2/modeling_kosmos2.py,sha256=XzHvet6cnlBwGxYKsSzf6LAYV3g_wtedSCM4PKnZ1wo,82065 +transformers/models/kosmos2/processing_kosmos2.py,sha256=Q5NPmkXRcIltvykzUASwobWc7xOQbbaMh5_hPD48WPQ,30907 +transformers/models/kosmos2_5/__init__.py,sha256=rn3igfupnqkSt_O0QLvLmrTy_fX_N7EIFPdHtr2ud7o,1179 +transformers/models/kosmos2_5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/kosmos2_5/__pycache__/configuration_kosmos2_5.cpython-313.pyc,, +transformers/models/kosmos2_5/__pycache__/image_processing_kosmos2_5.cpython-313.pyc,, +transformers/models/kosmos2_5/__pycache__/image_processing_kosmos2_5_fast.cpython-313.pyc,, +transformers/models/kosmos2_5/__pycache__/modeling_kosmos2_5.cpython-313.pyc,, +transformers/models/kosmos2_5/__pycache__/processing_kosmos2_5.cpython-313.pyc,, +transformers/models/kosmos2_5/configuration_kosmos2_5.py,sha256=KwVBazDcAinxR9TQ2Z6wejbw_E9L2F5Scfk-cFaCm9Y,11654 +transformers/models/kosmos2_5/image_processing_kosmos2_5.py,sha256=6GYOgPTQeLEGgcHX7h9kPq3_XJprwWU6EjSUcIe7Re0,15011 +transformers/models/kosmos2_5/image_processing_kosmos2_5_fast.py,sha256=9IA6EagLo1L0j1vR8vZkgFWjEYhaS4YoeqgBjrnRTRg,12282 +transformers/models/kosmos2_5/modeling_kosmos2_5.py,sha256=Z5z08l1_wn9oHIW4TzD__GsL1-2lwSzrPGWeMhHE9EU,83002 
+transformers/models/kosmos2_5/processing_kosmos2_5.py,sha256=oD6PEWiNb-aIjTO-ZQbfM8A9zN1ly5Us5xTfEzdaBEs,6441 +transformers/models/kyutai_speech_to_text/__init__.py,sha256=KxatXD7pSOmwZWzs7nFOXG9Hc2wxaAS3CYwxg54lq9g,1135 +transformers/models/kyutai_speech_to_text/__pycache__/__init__.cpython-313.pyc,, +transformers/models/kyutai_speech_to_text/__pycache__/configuration_kyutai_speech_to_text.cpython-313.pyc,, +transformers/models/kyutai_speech_to_text/__pycache__/feature_extraction_kyutai_speech_to_text.cpython-313.pyc,, +transformers/models/kyutai_speech_to_text/__pycache__/modeling_kyutai_speech_to_text.cpython-313.pyc,, +transformers/models/kyutai_speech_to_text/__pycache__/modular_kyutai_speech_to_text.cpython-313.pyc,, +transformers/models/kyutai_speech_to_text/__pycache__/processing_kyutai_speech_to_text.cpython-313.pyc,, +transformers/models/kyutai_speech_to_text/configuration_kyutai_speech_to_text.py,sha256=eeBUzFfZnDlhcCuhQzaFchNFT_Wq-2-mBxXPJ_lelnI,9030 +transformers/models/kyutai_speech_to_text/feature_extraction_kyutai_speech_to_text.py,sha256=PfGZhywYO9o3sbbO5fLfrJtXLsbhl1CRJmnPeUHbBgw,11808 +transformers/models/kyutai_speech_to_text/modeling_kyutai_speech_to_text.py,sha256=sEy7F1NTLL9QAmuJZ6aZw_Kh9DeDQoI8RayUmyTFkas,64505 +transformers/models/kyutai_speech_to_text/modular_kyutai_speech_to_text.py,sha256=GRCWOSiQY8rd5Sl_mfPdMcscZ8--0i7oJC_1fECdxVk,23366 +transformers/models/kyutai_speech_to_text/processing_kyutai_speech_to_text.py,sha256=EHRvOqyvO1vuQj7AyBAmaQHhgeocHfCWeQNT8KgmFgo,1504 +transformers/models/layoutlm/__init__.py,sha256=Mv-k01_9_SxbADuSx2pWoNGBxgUe4IH15Kcg-vc_0OI,1124 +transformers/models/layoutlm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/layoutlm/__pycache__/configuration_layoutlm.cpython-313.pyc,, +transformers/models/layoutlm/__pycache__/modeling_layoutlm.cpython-313.pyc,, +transformers/models/layoutlm/__pycache__/modeling_tf_layoutlm.cpython-313.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm.cpython-313.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm_fast.cpython-313.pyc,, +transformers/models/layoutlm/configuration_layoutlm.py,sha256=lJpmBnPMPzNQ07IhYWznczuR_Xra5E7znijzmFuBzmM,8416 +transformers/models/layoutlm/modeling_layoutlm.py,sha256=F3q9we9P-5P4n_8jL3KtonK9RqoVpW-0dWSgzMFEx2M,47591 +transformers/models/layoutlm/modeling_tf_layoutlm.py,sha256=nbnFaMwHF6o4H37KjumFss3PkBG1JvR9bqSaCUgeQO0,73207 +transformers/models/layoutlm/tokenization_layoutlm.py,sha256=xmr9ECm3iKIekmGUFJCh6doTOuto43pZoGRJtOaAsnc,20141 +transformers/models/layoutlm/tokenization_layoutlm_fast.py,sha256=LQl2LTmosc0mhrdir332AMepzHmuwxCDrM8RxYxUrJM,6691 +transformers/models/layoutlmv2/__init__.py,sha256=8f9dWBf1riaQI2KAw-gIrQGNKP4f2uUdJxyRKNVD2lI,1281 +transformers/models/layoutlmv2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/configuration_layoutlmv2.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/feature_extraction_layoutlmv2.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/image_processing_layoutlmv2.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/image_processing_layoutlmv2_fast.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/modeling_layoutlmv2.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/processing_layoutlmv2.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2.cpython-313.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2_fast.cpython-313.pyc,, 
+transformers/models/layoutlmv2/configuration_layoutlmv2.py,sha256=LmhbEBUesm93DINDXZRfWIKrB0Agy3cflhMh7UCMtqY,10914 +transformers/models/layoutlmv2/feature_extraction_layoutlmv2.py,sha256=C-PxmCgb7CMj-nrs7-yOEvGwLcW1okgKjK2OvIsKjHE,1313 +transformers/models/layoutlmv2/image_processing_layoutlmv2.py,sha256=T1A2IEzQR8Du5lqcnpTZ4yDJKqcZfv5rOLwwGC5g5tk,13625 +transformers/models/layoutlmv2/image_processing_layoutlmv2_fast.py,sha256=F9vFWyyPyzWK2Iz6ghmdWRDod4Gl97gzXLEvCsp5VvI,5578 +transformers/models/layoutlmv2/modeling_layoutlmv2.py,sha256=dmMHao3__jjv095y3E94KIZ3RlLKWceISvJ5ERixoa4,61926 +transformers/models/layoutlmv2/processing_layoutlmv2.py,sha256=UGximtXB_J9XJ6_L1Zt9jQGbNLHz-nfhBQ6WaIE3bdg,8511 +transformers/models/layoutlmv2/tokenization_layoutlmv2.py,sha256=NPwb7mXxwLzRHqfDHqcKww8QEXePE8W-25cyJXYpeSg,72137 +transformers/models/layoutlmv2/tokenization_layoutlmv2_fast.py,sha256=kszD7kD1-Zi6tos9t3gSTja-wWCi0k8e3D5uhZjR3ws,37064 +transformers/models/layoutlmv3/__init__.py,sha256=N-Ty2DqEyDqyd5i-k89LMi2qSbojaasZ-ozSTxs1GHo,1323 +transformers/models/layoutlmv3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/configuration_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/feature_extraction_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/image_processing_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/image_processing_layoutlmv3_fast.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_tf_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/processing_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3.cpython-313.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3_fast.cpython-313.pyc,, +transformers/models/layoutlmv3/configuration_layoutlmv3.py,sha256=-FaapM8qkEUrlkfsFAG27v5hPJh-SmkQkXSYb5OiBjU,13288 +transformers/models/layoutlmv3/feature_extraction_layoutlmv3.py,sha256=G2oB86aN-ebGJjw6YMsuEVuSfnIAylsteMWpZR3Gcvs,1313 +transformers/models/layoutlmv3/image_processing_layoutlmv3.py,sha256=dvQJqhk5gb76eDjOqR81Ds2JBAmE9doUXIBH7wADwtQ,18581 +transformers/models/layoutlmv3/image_processing_layoutlmv3_fast.py,sha256=L8SmPIaIQcfKWrqtOaD3aXtsajnJuwIgtKe7PzMY0_s,6165 +transformers/models/layoutlmv3/modeling_layoutlmv3.py,sha256=BrUTHHIbuqu4MXYeqDtekXBwQ-WjLLYSvpdzpBRCJ3c,53566 +transformers/models/layoutlmv3/modeling_tf_layoutlmv3.py,sha256=QMYgY31NUVETlCDaX9WIwSnPZwb5ZfHl8N0JfwVIEJc,76633 +transformers/models/layoutlmv3/processing_layoutlmv3.py,sha256=f4RPc5nM-8SRU02j_v34rm3QnGCKnYkTmxp34eTaEws,8362 +transformers/models/layoutlmv3/tokenization_layoutlmv3.py,sha256=17kZ8YegVDFnv0aUn14xeLWV2LTxQn8cZrgw1OXa6jE,73235 +transformers/models/layoutlmv3/tokenization_layoutlmv3_fast.py,sha256=l7JmjZ3uMA3fRimszuxqpQF5_0AFB-wjXwTwSwj7Rcg,39917 +transformers/models/layoutxlm/__init__.py,sha256=djfI2YGJISwww_XDfyf4kCj3a_HiC6Hld1rlaHRtHPg,1047 +transformers/models/layoutxlm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/layoutxlm/__pycache__/processing_layoutxlm.cpython-313.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm.cpython-313.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm_fast.cpython-313.pyc,, +transformers/models/layoutxlm/processing_layoutxlm.py,sha256=E09g63s0vHiFjoqe2kbhlGNNSAG0Y8bd2GKWChimpRs,8442 
+transformers/models/layoutxlm/tokenization_layoutxlm.py,sha256=BLbZIliyQIRcXAtExyWN0sIV2ZRf1FqFdsnlKsB-iIo,58329 +transformers/models/layoutxlm/tokenization_layoutxlm_fast.py,sha256=1J_rW1fGdTsQsJRbLfVhbWngXnKecX-xELBtYmu98QA,40471 +transformers/models/led/__init__.py,sha256=KaOht9jIet9WQrPRli8DwD7q5fzTWsffxf7LK-sQuw4,1099 +transformers/models/led/__pycache__/__init__.cpython-313.pyc,, +transformers/models/led/__pycache__/configuration_led.cpython-313.pyc,, +transformers/models/led/__pycache__/modeling_led.cpython-313.pyc,, +transformers/models/led/__pycache__/modeling_tf_led.cpython-313.pyc,, +transformers/models/led/__pycache__/tokenization_led.cpython-313.pyc,, +transformers/models/led/__pycache__/tokenization_led_fast.cpython-313.pyc,, +transformers/models/led/configuration_led.py,sha256=wuLDY2wIywEU_23WuDfcoGW8-bg_X8SmP843xnYFyZQ,7455 +transformers/models/led/modeling_led.py,sha256=7pgIWN-0sJdOfM06jLsGit64R61BeahlkP3MsLm4xIw,125061 +transformers/models/led/modeling_tf_led.py,sha256=WPRKVHYflyt5aYEvIum-rqs2lrEj629KCfHFMBSQeKM,123085 +transformers/models/led/tokenization_led.py,sha256=m7FhNIvAQer9k9t_WqYMGSE3k59yn7L5tIuB3JH7uzE,19843 +transformers/models/led/tokenization_led_fast.py,sha256=yO0G0Q6yGmsPCyEII9t-AnYQeEaJ-HQDbLjfAcGI9Cs,14170 +transformers/models/levit/__init__.py,sha256=acEjEeDtpQ9q3a-hf90z6TZ0js04BtZvbCcn4HGWCyk,1124 +transformers/models/levit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/levit/__pycache__/configuration_levit.cpython-313.pyc,, +transformers/models/levit/__pycache__/feature_extraction_levit.cpython-313.pyc,, +transformers/models/levit/__pycache__/image_processing_levit.cpython-313.pyc,, +transformers/models/levit/__pycache__/image_processing_levit_fast.cpython-313.pyc,, +transformers/models/levit/__pycache__/modeling_levit.cpython-313.pyc,, +transformers/models/levit/configuration_levit.py,sha256=UOtUDcZK6i4kPrtjYgCoWpZsxf7A1BQZeZjCYoXnMek,5772 +transformers/models/levit/feature_extraction_levit.py,sha256=sR1MZBqvbep8KdqX45Sw3V--ZqCe3fePzC1CT9cv4Js,1317 +transformers/models/levit/image_processing_levit.py,sha256=n5z9mpFYTLFgyoIZ0MZ4b_mD8zh3BEA72ohMelsXIKk,16738 +transformers/models/levit/image_processing_levit_fast.py,sha256=ijtU7Idez27Re4fPgVNZ6mW9UqoXL6mgYk4T874IJLY,3733 +transformers/models/levit/modeling_levit.py,sha256=jIk0WOgBkwMUpFhzRlHDtFfS2ynaHWLZWOWGOxipu38,25251 +transformers/models/lfm2/__init__.py,sha256=9fNMRtqveDp18iRtUjNqCu0XELuvwJOmizUyI1h0zHw,989 +transformers/models/lfm2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/lfm2/__pycache__/configuration_lfm2.cpython-313.pyc,, +transformers/models/lfm2/__pycache__/modeling_lfm2.cpython-313.pyc,, +transformers/models/lfm2/__pycache__/modular_lfm2.cpython-313.pyc,, +transformers/models/lfm2/configuration_lfm2.py,sha256=6jJ2kBds8PYg6DyAHBFdpMIcJW7rBv-bkMrM1LLVgII,7831 +transformers/models/lfm2/modeling_lfm2.py,sha256=85n6KhEdrIx_wHsnF6uxDuPoJGiFEyH0VVPrsQ-9KLE,32197 +transformers/models/lfm2/modular_lfm2.py,sha256=YoX6B058cwdrf17ozF7M2VMcVe5N14eVFI25KcbVP-E,20988 +transformers/models/lfm2_vl/__init__.py,sha256=qKhv0-rYp9emO4HCSVPOpPCB8hUy62P2v2rYjkBeTAI,1082 +transformers/models/lfm2_vl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/lfm2_vl/__pycache__/configuration_lfm2_vl.cpython-313.pyc,, +transformers/models/lfm2_vl/__pycache__/image_processing_lfm2_vl_fast.cpython-313.pyc,, +transformers/models/lfm2_vl/__pycache__/modeling_lfm2_vl.cpython-313.pyc,, +transformers/models/lfm2_vl/__pycache__/modular_lfm2_vl.cpython-313.pyc,, 
+transformers/models/lfm2_vl/__pycache__/processing_lfm2_vl.cpython-313.pyc,, +transformers/models/lfm2_vl/configuration_lfm2_vl.py,sha256=0nhn4M3gkICFzmLWdX3kZ6SzUdoXu4eAIkUkIlS3mXo,3846 +transformers/models/lfm2_vl/image_processing_lfm2_vl_fast.py,sha256=xKeZ7ZXgvIisvbPlxvUBCkszN_JsXvO6fQlmtgMNXe8,21323 +transformers/models/lfm2_vl/modeling_lfm2_vl.py,sha256=QP1V4tNhzdCWas74mdUuH-NqB2Vw_kOc_DuqYMxRvj4,20569 +transformers/models/lfm2_vl/modular_lfm2_vl.py,sha256=k6yTLP3bupZgYA5cIqUM5AesGVj-4fogVZHGsXUn-Yc,14130 +transformers/models/lfm2_vl/processing_lfm2_vl.py,sha256=QPexFW1QoQsE07xihNfH1d2iXUCg8RkE5Q9b8rnx8sI,11768 +transformers/models/lightglue/__init__.py,sha256=xdtDUaVLHRf2vWjlv-l2JWlllw6Obz8ZRZeV297Ds1Y,1045 +transformers/models/lightglue/__pycache__/__init__.cpython-313.pyc,, +transformers/models/lightglue/__pycache__/configuration_lightglue.cpython-313.pyc,, +transformers/models/lightglue/__pycache__/image_processing_lightglue.cpython-313.pyc,, +transformers/models/lightglue/__pycache__/modeling_lightglue.cpython-313.pyc,, +transformers/models/lightglue/__pycache__/modular_lightglue.cpython-313.pyc,, +transformers/models/lightglue/configuration_lightglue.py,sha256=jijJsgjlZEZ-MSZARQ1kKUiW_1VU5B-FVlHGtxWX4bc,8135 +transformers/models/lightglue/image_processing_lightglue.py,sha256=x2zCoHpti6yXIZMKhBxs9PQpdOvrERish-saiQh6I34,24864 +transformers/models/lightglue/modeling_lightglue.py,sha256=KqR-586C7yiEVJUnXFM2bZ1X1OJna0uL5wdKWWZYaEM,43590 +transformers/models/lightglue/modular_lightglue.py,sha256=7W4PXheKZc1FUG2Omk2c55WTsBOdTRtWP7yJRGEKxlw,51228 +transformers/models/lilt/__init__.py,sha256=9XEq7kJwN0mKO469mR0mtlRUdljjq7V80gejpqb59K0,989 +transformers/models/lilt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/lilt/__pycache__/configuration_lilt.cpython-313.pyc,, +transformers/models/lilt/__pycache__/modeling_lilt.cpython-313.pyc,, +transformers/models/lilt/configuration_lilt.py,sha256=rPA7P9f9B2f29_Q74Dx93-WXyhK1JT5W67PoSrDOoQc,6737 +transformers/models/lilt/modeling_lilt.py,sha256=lHQ05B0KSgoocU-4ecavt-2kNg2lhnrA3X5eE1LGETU,48235 +transformers/models/llama/__init__.py,sha256=k1HnOc4-BwvgSizE8E0IlrkCh_TVgv1XX8G-xozfgLo,1111 +transformers/models/llama/__pycache__/__init__.cpython-313.pyc,, +transformers/models/llama/__pycache__/configuration_llama.cpython-313.pyc,, +transformers/models/llama/__pycache__/modeling_flax_llama.cpython-313.pyc,, +transformers/models/llama/__pycache__/modeling_llama.cpython-313.pyc,, +transformers/models/llama/__pycache__/tokenization_llama.cpython-313.pyc,, +transformers/models/llama/__pycache__/tokenization_llama_fast.cpython-313.pyc,, +transformers/models/llama/configuration_llama.py,sha256=m-ywRVkzFiivG0ty9E1ooHJlvQyV44PRHUxdpe06QI4,12077 +transformers/models/llama/modeling_flax_llama.py,sha256=3_PSmX_OPr7ENnUScfAyuepDwejN-2qFRj5vmy8y-KM,30675 +transformers/models/llama/modeling_llama.py,sha256=9Nz8uUaFeflPzoTqV0DWmox5NN-TYHCv-GM3VEfdyrs,20716 +transformers/models/llama/tokenization_llama.py,sha256=KQOtC9Jzm6vs9ugT--SyHjb_XLOUFZ4MwTNZuU6gUm8,18729 +transformers/models/llama/tokenization_llama_fast.py,sha256=nYK5v4GRDhoLkxhtgEoLqq8koigofPTAuRv8-rSvk8U,10965 +transformers/models/llama4/__init__.py,sha256=YLpUGkKivYWky6rr715H2yMb9fCPr_3AV8OwWd2mrpA,1078 +transformers/models/llama4/__pycache__/__init__.cpython-313.pyc,, +transformers/models/llama4/__pycache__/configuration_llama4.cpython-313.pyc,, +transformers/models/llama4/__pycache__/image_processing_llama4_fast.cpython-313.pyc,, 
+transformers/models/llama4/__pycache__/modeling_llama4.cpython-313.pyc,, +transformers/models/llama4/__pycache__/processing_llama4.cpython-313.pyc,, +transformers/models/llama4/configuration_llama4.py,sha256=5z4QQiJeuOOhz0qg3mRDsuUoR4rjdQuj7XHJAfinHAg,23277 +transformers/models/llama4/image_processing_llama4_fast.py,sha256=jIRVlT7pR8DgSnc3cgD_IBb3D5O4gz-lCpp2dkt962Y,18006 +transformers/models/llama4/modeling_llama4.py,sha256=weWNIP3ZYYzeT2U8djw4lD7jvBlzDse7zp_8I0mFE_o,59350 +transformers/models/llama4/processing_llama4.py,sha256=y7ZsQ8JsAYSs_FMzm64a4NpOTLn8L7qnGrRCOzCEhkA,16112 +transformers/models/llava/__init__.py,sha256=h7TDiwhtiqDQbay9v760sbmBGM6yWs3J1tmnIr3PCys,1074 +transformers/models/llava/__pycache__/__init__.cpython-313.pyc,, +transformers/models/llava/__pycache__/configuration_llava.cpython-313.pyc,, +transformers/models/llava/__pycache__/image_processing_llava.cpython-313.pyc,, +transformers/models/llava/__pycache__/image_processing_llava_fast.cpython-313.pyc,, +transformers/models/llava/__pycache__/modeling_llava.cpython-313.pyc,, +transformers/models/llava/__pycache__/processing_llava.cpython-313.pyc,, +transformers/models/llava/configuration_llava.py,sha256=MzKTXjeGMlr-KNFW7L8T4hg0bx-oZ9Yh2bhVtv3QUt4,5760 +transformers/models/llava/image_processing_llava.py,sha256=0SQZJ6fM4oXt1OL_qr-KSb7K0u7MZ1MiZ797eOUpExI,21257 +transformers/models/llava/image_processing_llava_fast.py,sha256=LrKEahtnD1Mx-GHSz-UQP3tlgIaf_b8N7-yRspytvPQ,6751 +transformers/models/llava/modeling_llava.py,sha256=9IPrz21NTWnAp-3SSXERY5bvGQMKoWVh_7FHSyXLoXw,20455 +transformers/models/llava/processing_llava.py,sha256=1Weh6NnTrmJwZT3y_nZTbrn_tmwXLN-FgJUBVipXTig,10038 +transformers/models/llava_next/__init__.py,sha256=gyT3qcEjuxecgCiFoQoz-tf10ShqzfOL8IzPOhpjfto,1141 +transformers/models/llava_next/__pycache__/__init__.cpython-313.pyc,, +transformers/models/llava_next/__pycache__/configuration_llava_next.cpython-313.pyc,, +transformers/models/llava_next/__pycache__/image_processing_llava_next.cpython-313.pyc,, +transformers/models/llava_next/__pycache__/image_processing_llava_next_fast.cpython-313.pyc,, +transformers/models/llava_next/__pycache__/modeling_llava_next.cpython-313.pyc,, +transformers/models/llava_next/__pycache__/processing_llava_next.cpython-313.pyc,, +transformers/models/llava_next/configuration_llava_next.py,sha256=8LvnjmDgKsNRvsrlqZRkN2NQsuHZXkPmEo-H-8PzkUI,6776 +transformers/models/llava_next/image_processing_llava_next.py,sha256=Tl0FEIv-6OqzB82QMSaTWWDCbOL9kS1t2cFky-1WGCM,35823 +transformers/models/llava_next/image_processing_llava_next_fast.py,sha256=Mp8KwvaE7IBm5rfYclF86zx43sOUcA6XK38ecRIM6NM,10701 +transformers/models/llava_next/modeling_llava_next.py,sha256=NwN0k3t4iQgWjTzDyWnQsZq2ObFq44WP3YejNEdje_I,35878 +transformers/models/llava_next/processing_llava_next.py,sha256=p8fbiv9bowT87QGrvatUKo1VNb3nTd5LoI02yK3N_B0,12794 +transformers/models/llava_next_video/__init__.py,sha256=OGiUL7X9x0bzmsnZi0KA6Sl2ycalLQHkTgOpISYu3q8,1113 +transformers/models/llava_next_video/__pycache__/__init__.cpython-313.pyc,, +transformers/models/llava_next_video/__pycache__/configuration_llava_next_video.cpython-313.pyc,, +transformers/models/llava_next_video/__pycache__/image_processing_llava_next_video.cpython-313.pyc,, +transformers/models/llava_next_video/__pycache__/modeling_llava_next_video.cpython-313.pyc,, +transformers/models/llava_next_video/__pycache__/modular_llava_next_video.cpython-313.pyc,, +transformers/models/llava_next_video/__pycache__/processing_llava_next_video.cpython-313.pyc,, 
+transformers/models/llava_next_video/__pycache__/video_processing_llava_next_video.cpython-313.pyc,, +transformers/models/llava_next_video/configuration_llava_next_video.py,sha256=XG-wMKQFmGwGhbQSE8bivrfVS2RPQgSqmW0PEbZRoCU,8266 +transformers/models/llava_next_video/image_processing_llava_next_video.py,sha256=J6cr5S2SrCPU0fNjXAq3pZ0CEhEtd3-y_VmCgjXwyhk,21316 +transformers/models/llava_next_video/modeling_llava_next_video.py,sha256=PewB3MWGrlT7Mc6DkGXpEAyy1_SQdjSGlLMfEXHSvxk,46224 +transformers/models/llava_next_video/modular_llava_next_video.py,sha256=Nlvy-hLpEFX6IN9wkfyjO1XZGqBeqDfpQsv4mGxeeRs,34656 +transformers/models/llava_next_video/processing_llava_next_video.py,sha256=McOf61VhnBdY6kyy4S2F1i8IVxcRID3roJIsxmyY3j4,15572 +transformers/models/llava_next_video/video_processing_llava_next_video.py,sha256=2YSNAjUMjIEOfoC8Z2kfRaGjitAedlCDj7ILYKZwfKo,1703 +transformers/models/llava_onevision/__init__.py,sha256=Eeg8yGcfdjCxwjSCg_zoXG48JG6gSYH8_aXBcOxQvnA,1218 +transformers/models/llava_onevision/__pycache__/__init__.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/configuration_llava_onevision.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/image_processing_llava_onevision.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/image_processing_llava_onevision_fast.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/modeling_llava_onevision.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/modular_llava_onevision.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/processing_llava_onevision.cpython-313.pyc,, +transformers/models/llava_onevision/__pycache__/video_processing_llava_onevision.cpython-313.pyc,, +transformers/models/llava_onevision/configuration_llava_onevision.py,sha256=eGeC9jBTNHeouYrJ8KP_IUYVLSbjO8ySj0oFNvGIJFA,8061 +transformers/models/llava_onevision/image_processing_llava_onevision.py,sha256=MGSsSMyaZLhsVuuJZ0wFtXluoILgozCPlOmsSk6q4Ug,38350 +transformers/models/llava_onevision/image_processing_llava_onevision_fast.py,sha256=KI5Q0XKB0NoVird15WaWi8237LAzXteEuJMk6_R_HH4,14756 +transformers/models/llava_onevision/modeling_llava_onevision.py,sha256=YzCIX-L5Wbcgk2_3W1LhrqFp0VGhHrKw_zYDC8Cjcfc,45637 +transformers/models/llava_onevision/modular_llava_onevision.py,sha256=JkdjGN4ohsuRajNW9Pe9gzW_BkDSJ_o9rHJvXzBLU-E,34808 +transformers/models/llava_onevision/processing_llava_onevision.py,sha256=M6OkrKryC7wxAfpaidHhNZi0riVVeFzIXVkiE-rFDgo,16862 +transformers/models/llava_onevision/video_processing_llava_onevision.py,sha256=Gupdo_ysxPfM0TAe-qUDEnnd-GsyOLPLnNXdjnVlae4,1713 +transformers/models/longcat_flash/__init__.py,sha256=0AQcD2MU3TfVLepEHTsarPZaT15FYuZRcFS8P5jc9Jg,1040 +transformers/models/longcat_flash/__pycache__/__init__.cpython-313.pyc,, +transformers/models/longcat_flash/__pycache__/configuration_longcat_flash.cpython-313.pyc,, +transformers/models/longcat_flash/__pycache__/modeling_longcat_flash.cpython-313.pyc,, +transformers/models/longcat_flash/__pycache__/modular_longcat_flash.cpython-313.pyc,, +transformers/models/longcat_flash/configuration_longcat_flash.py,sha256=zbQlcUkStgZfm6AI8vOvEL6z_uc5RO2HeCCeS3o22mM,11038 +transformers/models/longcat_flash/modeling_longcat_flash.py,sha256=SRKMDkcD3qdGpEQZiPkVqet6poBLUfhXfBsI3khjeec,29857 +transformers/models/longcat_flash/modular_longcat_flash.py,sha256=zLM9Mjb8v-ZK-zorIwYSAqkMBkMDo2kQYMW2AF2fRxs,15173 +transformers/models/longformer/__init__.py,sha256=vg5ScmyEX2D-xPfnxNNBhdj6-Xj0t3HoPmt709PQjTE,1134 
+transformers/models/longformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/longformer/__pycache__/configuration_longformer.cpython-313.pyc,, +transformers/models/longformer/__pycache__/modeling_longformer.cpython-313.pyc,, +transformers/models/longformer/__pycache__/modeling_tf_longformer.cpython-313.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer.cpython-313.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer_fast.cpython-313.pyc,, +transformers/models/longformer/configuration_longformer.py,sha256=2AwcxPAnCJw5CbCdgTXndYBTlJTWi4QzDnilgG_63T0,8867 +transformers/models/longformer/modeling_longformer.py,sha256=GAwna49OZxXBXd9Ufaf5Aj11bzErUk4xZJjTyjz_yUI,108058 +transformers/models/longformer/modeling_tf_longformer.py,sha256=u-n2NW5EPB7UBMyIFWd5Jm_stpO7O9nlV74QHNBs-4E,129534 +transformers/models/longformer/tokenization_longformer.py,sha256=cK8ke6cLfMRWXlXw_8FpjTIzLoaBi3iY7Mgf4brMRu8,16818 +transformers/models/longformer/tokenization_longformer_fast.py,sha256=Gg8zvjr0Mwqi4aHupJZLykCilqE7jeXREXbMo59ziqQ,11230 +transformers/models/longt5/__init__.py,sha256=TzoI1JGkvJIf9NlHDQY8_EUuW-upkQZ23wh_8Urtet0,1033 +transformers/models/longt5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/longt5/__pycache__/configuration_longt5.cpython-313.pyc,, +transformers/models/longt5/__pycache__/modeling_flax_longt5.cpython-313.pyc,, +transformers/models/longt5/__pycache__/modeling_longt5.cpython-313.pyc,, +transformers/models/longt5/configuration_longt5.py,sha256=ktQfrCmI60usRiyk-hcZinOpcs7p94zfOV47l_jQnWc,8116 +transformers/models/longt5/modeling_flax_longt5.py,sha256=lNP-l4MlXq4zGNYlo0gyF3oXvcNCk-9qDLQYkBElGE8,105838 +transformers/models/longt5/modeling_longt5.py,sha256=wX-CS3sX6SOUgQnYs4VmiVUR0X8EGjAR62bzOQeMWow,104896 +transformers/models/luke/__init__.py,sha256=YQL403sV6tk5t8sjvi-4hgvx1rvyThx45l7S4T4xpEE,1026 +transformers/models/luke/__pycache__/__init__.cpython-313.pyc,, +transformers/models/luke/__pycache__/configuration_luke.cpython-313.pyc,, +transformers/models/luke/__pycache__/modeling_luke.cpython-313.pyc,, +transformers/models/luke/__pycache__/tokenization_luke.cpython-313.pyc,, +transformers/models/luke/configuration_luke.py,sha256=q_QLFRDrJfABob9_6-xvSy7ES4VMYKg9A3_gG8DsxAM,6628 +transformers/models/luke/modeling_luke.py,sha256=6BylGCAFPj9-hVCDjHTD7vSegB9aFbuyDEnxG_xdbtk,98940 +transformers/models/luke/tokenization_luke.py,sha256=SHtnsAm-h1VwdHWjMPXQ0leHSCoqVFVdLA2KEJI2RpE,85643 +transformers/models/lxmert/__init__.py,sha256=iUyLmlBuiz_av7H5ghaQB4RNbpw275N7wwdmiiV0PAc,1114 +transformers/models/lxmert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/lxmert/__pycache__/configuration_lxmert.cpython-313.pyc,, +transformers/models/lxmert/__pycache__/modeling_lxmert.cpython-313.pyc,, +transformers/models/lxmert/__pycache__/modeling_tf_lxmert.cpython-313.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert.cpython-313.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert_fast.cpython-313.pyc,, +transformers/models/lxmert/configuration_lxmert.py,sha256=etr-nrYjobgiPW4H9-PTC9VuGgOdR13DRiqifXFkna4,8934 +transformers/models/lxmert/modeling_lxmert.py,sha256=aG5GYE6DL2xFFLOMe_g6t8YSl1ZUSW2kbW8sSArPQns,63580 +transformers/models/lxmert/modeling_tf_lxmert.py,sha256=aGYP4T1uTzZchJkhZZNZXMgACuFe5TFNkHm8jTIX4S4,72722 +transformers/models/lxmert/tokenization_lxmert.py,sha256=He3yKkZAcjAoy0l7rNzc-W3mk_93FAWRB4YX8awnCU4,20165 
+transformers/models/lxmert/tokenization_lxmert_fast.py,sha256=5E0lKrkPi1dSrXcZ2BxipbDtMMBVjcwFStrOBVELRv8,6625 +transformers/models/m2m_100/__init__.py,sha256=0uPov299rgQmMwwSyM_m0yGFejP5djgaUY37GkNGnC8,1035 +transformers/models/m2m_100/__pycache__/__init__.cpython-313.pyc,, +transformers/models/m2m_100/__pycache__/configuration_m2m_100.cpython-313.pyc,, +transformers/models/m2m_100/__pycache__/modeling_m2m_100.cpython-313.pyc,, +transformers/models/m2m_100/__pycache__/tokenization_m2m_100.cpython-313.pyc,, +transformers/models/m2m_100/configuration_m2m_100.py,sha256=iwR_eDM_JlooTz08PGdw8zqeFmXNq3_3ttNe1ivQjj0,13454 +transformers/models/m2m_100/modeling_m2m_100.py,sha256=ck-nTnujQ060LYDp0CKgH1HMGPJtt2Oh48W4NJYqWu4,67446 +transformers/models/m2m_100/tokenization_m2m_100.py,sha256=ZWAVadNEoJ9YCCDSe7lEzGIvp9tdBC9RfCELTGR1wHg,16416 +transformers/models/mamba/__init__.py,sha256=4oGJySQbwoALRGVWMEwXBm0A6fhKsr4Raly46a5g1G0,991 +transformers/models/mamba/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mamba/__pycache__/configuration_mamba.cpython-313.pyc,, +transformers/models/mamba/__pycache__/modeling_mamba.cpython-313.pyc,, +transformers/models/mamba/configuration_mamba.py,sha256=krht7Qj-1yfYxdMr3zB9WhBVqUKiINt2o5BvDC8v-XI,7433 +transformers/models/mamba/modeling_mamba.py,sha256=Sl3rHMnCJ1eCasXdo0P1oMO5KMUKxdnHyGDiR7zgbN8,40131 +transformers/models/mamba2/__init__.py,sha256=Ui4j-I2cnPEEszkzRTLSUW42SE4Qg1YTuW6hGeaOFZg,993 +transformers/models/mamba2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mamba2/__pycache__/configuration_mamba2.cpython-313.pyc,, +transformers/models/mamba2/__pycache__/modeling_mamba2.cpython-313.pyc,, +transformers/models/mamba2/configuration_mamba2.py,sha256=YWJ7Y_-cEiTLv45b5oChKdzHFh61VWFUMdDZhcjNygU,8214 +transformers/models/mamba2/modeling_mamba2.py,sha256=nOrvEkjTXjUxQDA7ejuLl2WSyXoOtjH-y6jcXiSV0Xg,47716 +transformers/models/marian/__init__.py,sha256=Yg8jbvM0Hf6WXua0__v_G-34dvG6zFib5R5e_qHtmYM,1110 +transformers/models/marian/__pycache__/__init__.cpython-313.pyc,, +transformers/models/marian/__pycache__/configuration_marian.cpython-313.pyc,, +transformers/models/marian/__pycache__/modeling_flax_marian.cpython-313.pyc,, +transformers/models/marian/__pycache__/modeling_marian.cpython-313.pyc,, +transformers/models/marian/__pycache__/modeling_tf_marian.cpython-313.pyc,, +transformers/models/marian/__pycache__/tokenization_marian.cpython-313.pyc,, +transformers/models/marian/configuration_marian.py,sha256=2Sv1CzVYYIXvLBnytaLG0T4k-ptoyphVLAJk-zRcsyw,18420 +transformers/models/marian/modeling_flax_marian.py,sha256=qkUUpKC4MPHmESuI0wjGQ95uu0Fs0jIjQMpKU_0-xMM,64429 +transformers/models/marian/modeling_marian.py,sha256=ibG5SSh_Gepa29z4Hz65D3CvFm041Fb1bcrI7NyVZgo,79177 +transformers/models/marian/modeling_tf_marian.py,sha256=5bdZNB0AV82RWOvPMGe86hUelH97WrVQPtqIIxKK4rQ,72668 +transformers/models/marian/tokenization_marian.py,sha256=4Ox1R818g5c4x_pR41JgJfOhAZOaLXTRDgOQPQrOkRU,16868 +transformers/models/markuplm/__init__.py,sha256=PyhrxFsms-oD4SOBO5j3t2mIPLN3PHjKBjTGaUTITMY,1170 +transformers/models/markuplm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/markuplm/__pycache__/configuration_markuplm.cpython-313.pyc,, +transformers/models/markuplm/__pycache__/feature_extraction_markuplm.cpython-313.pyc,, +transformers/models/markuplm/__pycache__/modeling_markuplm.cpython-313.pyc,, +transformers/models/markuplm/__pycache__/processing_markuplm.cpython-313.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm.cpython-313.pyc,, 
+transformers/models/markuplm/__pycache__/tokenization_markuplm_fast.cpython-313.pyc,, +transformers/models/markuplm/configuration_markuplm.py,sha256=VhCY0oVEMneuTZNQZFjxwkNIsTEWJ5q5W66t__gS52A,7235 +transformers/models/markuplm/feature_extraction_markuplm.py,sha256=WquMM3IybHzSpniaKvRUux-liOst9-l3Z1ZmfDNab80,6443 +transformers/models/markuplm/modeling_markuplm.py,sha256=MvEq-OFaFOLqt6tuJ0ktpBEmto8ZTVVp7FV5K8jtevY,43375 +transformers/models/markuplm/processing_markuplm.py,sha256=UtpKUu8Obib8WGnF124p7hzen62ECfbOxgUhtpLzBWI,5629 +transformers/models/markuplm/tokenization_markuplm.py,sha256=dRfXzjXAgSlHgTnXHMQKNUOF9id4l-NbmvQWdsmOlRg,70151 +transformers/models/markuplm/tokenization_markuplm_fast.py,sha256=G_2BWkPgi0FMRqxFlZhL0ecalVeqx2uQVcpthoLBjQE,43320 +transformers/models/mask2former/__init__.py,sha256=ceWZ-4gEaZxvgdhdvM-K18wZxuovtdS8vXkQIFNlfr4,1104 +transformers/models/mask2former/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mask2former/__pycache__/configuration_mask2former.cpython-313.pyc,, +transformers/models/mask2former/__pycache__/image_processing_mask2former.cpython-313.pyc,, +transformers/models/mask2former/__pycache__/image_processing_mask2former_fast.cpython-313.pyc,, +transformers/models/mask2former/__pycache__/modeling_mask2former.cpython-313.pyc,, +transformers/models/mask2former/__pycache__/modular_mask2former.cpython-313.pyc,, +transformers/models/mask2former/configuration_mask2former.py,sha256=6fJ8GbDInXB2Cly_QbrYx5xU4hNv-T8TV_9glVUiCtM,12588 +transformers/models/mask2former/image_processing_mask2former.py,sha256=6Po2kHXx_fnwAFC_e5zOl2nVA47vSHcrDy-4KLm6rPk,59164 +transformers/models/mask2former/image_processing_mask2former_fast.py,sha256=jmf-uq4oEKweyyB-aJK06Xait9PCw5nXD6L197utbYs,33574 +transformers/models/mask2former/modeling_mask2former.py,sha256=Q1fWIX2-u69gPuwV_WRjpDIINeErK9VukcwU8x4shD4,117039 +transformers/models/mask2former/modular_mask2former.py,sha256=S5ase8l-KaOejnGhY7PSa1qMeyAdqx2buLDHHgces6Y,15725 +transformers/models/maskformer/__init__.py,sha256=DWCF0SA7DGofmzu3y-j05x1NLeuKZHADvyyx4XfJwkw,1242 +transformers/models/maskformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer_swin.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/feature_extraction_maskformer.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/image_processing_maskformer.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/image_processing_maskformer_fast.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer.cpython-313.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer_swin.cpython-313.pyc,, +transformers/models/maskformer/configuration_maskformer.py,sha256=KBU5bzrkpaNiYZM8f3JX_EfVybtiSls5CeC5iTiXAW0,10673 +transformers/models/maskformer/configuration_maskformer_swin.py,sha256=-72GuV7OyDeU0wCX7OltUAcMPSpKZqwlqJwdsLWlijE,7253 +transformers/models/maskformer/feature_extraction_maskformer.py,sha256=QYnLeWeCeAJDLO9bK1W8hVhZ8QdUslC9nJK-3qNdaUc,1332 +transformers/models/maskformer/image_processing_maskformer.py,sha256=oH_AziQhua4OGicCdaPVxtPgMwRPqHQnlW5YtKv_xgA,60013 +transformers/models/maskformer/image_processing_maskformer_fast.py,sha256=OO3Cwwzaw-MLlVKtDxcH8s-D7Y-cYEga5CdTLnkazGA,34932 +transformers/models/maskformer/modeling_maskformer.py,sha256=jseYI6tv5QeDlcE0yCxe-P_sQTauzEO_GHWhTKMcioo,84617 
+transformers/models/maskformer/modeling_maskformer_swin.py,sha256=1wrGgKy3HiaGPZ9vyIi6B8tGaKx9PVS6gmABmH7mMYc,40579 +transformers/models/mbart/__init__.py,sha256=VefKwprf7OVOTgkXowKV2hT8X3mM369sRJXDY5a49ig,1148 +transformers/models/mbart/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mbart/__pycache__/configuration_mbart.cpython-313.pyc,, +transformers/models/mbart/__pycache__/modeling_flax_mbart.cpython-313.pyc,, +transformers/models/mbart/__pycache__/modeling_mbart.cpython-313.pyc,, +transformers/models/mbart/__pycache__/modeling_tf_mbart.cpython-313.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart.cpython-313.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart_fast.cpython-313.pyc,, +transformers/models/mbart/configuration_mbart.py,sha256=aWNWjpmHjvRVH6hw6JccAsXbameD2-fXBxuvd7xTmR8,18252 +transformers/models/mbart/modeling_flax_mbart.py,sha256=zKLyi2BuhINSDYU-WfFy48LSzz9hJBBoR3EZmeBBFRI,75373 +transformers/models/mbart/modeling_mbart.py,sha256=L7W_INSyg85bXlDFIRn9P3Fvl7ivizLJ8M08MxpztPY,88907 +transformers/models/mbart/modeling_tf_mbart.py,sha256=U_LB_z7NNp5ASoQf0HdfEPdPpL_KmeEP-AJ8C60SVHk,74106 +transformers/models/mbart/tokenization_mbart.py,sha256=5Qeg8LmCiYRQA_kgdnsHalP1Nux7VwBYJamfs-E6ERA,14200 +transformers/models/mbart/tokenization_mbart_fast.py,sha256=744tsO1V7FtO_MM_9E0OnGk98UViv9VKHB-g-KK0y-M,10880 +transformers/models/mbart50/__init__.py,sha256=9ukVFi1NqU3OoJcCJ-iKpJUZiu-K0t8yINuJHGltup0,1003 +transformers/models/mbart50/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50.cpython-313.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50_fast.cpython-313.pyc,, +transformers/models/mbart50/tokenization_mbart50.py,sha256=bHfZAjdxNKAsVruUc-gQ6768ga1qPzgLJwugIrarNgU,16403 +transformers/models/mbart50/tokenization_mbart50_fast.py,sha256=cNhawmrk4KjiiA4LVgsh1oUUAWMJzkZgSePFdHMi1Gc,11479 +transformers/models/megatron_bert/__init__.py,sha256=u1UIYjQlrfHcy81i2FzehRDJpt6KNfNJ4AePQYKgwOU,1007 +transformers/models/megatron_bert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/megatron_bert/__pycache__/configuration_megatron_bert.cpython-313.pyc,, +transformers/models/megatron_bert/__pycache__/modeling_megatron_bert.cpython-313.pyc,, +transformers/models/megatron_bert/configuration_megatron_bert.py,sha256=Z8A_6hWPyBaC_64AHDlvxGB-08uqpGAyHlX12ty1k2s,6517 +transformers/models/megatron_bert/modeling_megatron_bert.py,sha256=z6pNdzh7Wf4_R0buFmpEW6ymBNTFjLigPxFEvEfWAVU,72123 +transformers/models/megatron_gpt2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/megatron_gpt2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/megatron_gpt2/__pycache__/checkpoint_reshaping_and_interoperability.cpython-313.pyc,, +transformers/models/megatron_gpt2/checkpoint_reshaping_and_interoperability.py,sha256=3Oe0z75_0SQSM4OR-hRtH_w24LmhSV9AgsyzwKA2R9Y,37650 +transformers/models/metaclip_2/__init__.py,sha256=S5n45gZa_DwDJ3PtBbKzcjqDd6uOY4sbsSXR227oFZI,1001 +transformers/models/metaclip_2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/metaclip_2/__pycache__/configuration_metaclip_2.cpython-313.pyc,, +transformers/models/metaclip_2/__pycache__/modeling_metaclip_2.cpython-313.pyc,, +transformers/models/metaclip_2/__pycache__/modular_metaclip_2.cpython-313.pyc,, +transformers/models/metaclip_2/configuration_metaclip_2.py,sha256=k49lWYQ7r8em6hr9ouPgk1B22xRS29lLi10L6W6AlQY,17801 
+transformers/models/metaclip_2/modeling_metaclip_2.py,sha256=0iPTq4xJuqqJIsZtpOjp4OQ6SqTVAv9-kRnz9WRxHsU,57677 +transformers/models/metaclip_2/modular_metaclip_2.py,sha256=X7Tf8oAmTqm0SwUA_lWzlEH12rFVVzWw4_qEAc7RAWs,35628 +transformers/models/mgp_str/__init__.py,sha256=Qb3mXPCrWbQ1ksMRYMeXorrva97OOFNr1zoy4YQg-9k,1073 +transformers/models/mgp_str/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mgp_str/__pycache__/configuration_mgp_str.cpython-313.pyc,, +transformers/models/mgp_str/__pycache__/modeling_mgp_str.cpython-313.pyc,, +transformers/models/mgp_str/__pycache__/processing_mgp_str.cpython-313.pyc,, +transformers/models/mgp_str/__pycache__/tokenization_mgp_str.cpython-313.pyc,, +transformers/models/mgp_str/configuration_mgp_str.py,sha256=Pvwj6oBIFPp219NkKV3b4kisp77UzkN2JCCy31z2RZQ,5810 +transformers/models/mgp_str/modeling_mgp_str.py,sha256=J2dpfa0_Xo-hoQm5E7-CLNu0cCSTRfAoRVjI6BQkKl8,18912 +transformers/models/mgp_str/processing_mgp_str.py,sha256=bPRD7T2XRsGhR_mGidVZeToZTxw_u9MnMJqWykLNb3M,9226 +transformers/models/mgp_str/tokenization_mgp_str.py,sha256=VKVCt4TKcIK2yhqPTWgtPdaLz7v1-I2rn6LuoN4OvFw,3793 +transformers/models/mimi/__init__.py,sha256=VXRZ-D8-AyOYcmRGvSxhjwTYQcSNXcCXi5ubks6Qxhk,989 +transformers/models/mimi/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mimi/__pycache__/configuration_mimi.cpython-313.pyc,, +transformers/models/mimi/__pycache__/modeling_mimi.cpython-313.pyc,, +transformers/models/mimi/configuration_mimi.py,sha256=WNqutFlBjEvuNA4zuNJMX6_LA_ldn2EwTjhILJfqEbk,13507 +transformers/models/mimi/modeling_mimi.py,sha256=PaQYxZeDGmEpYRF_xqJrmNpNUc9DtGQyNTT5DyWHrMQ,79714 +transformers/models/minimax/__init__.py,sha256=3Ob5TqJX21OU-wQ5NF6aeyRbTRXRmoGzeaFFqtzkf7c,1028 +transformers/models/minimax/__pycache__/__init__.cpython-313.pyc,, +transformers/models/minimax/__pycache__/configuration_minimax.cpython-313.pyc,, +transformers/models/minimax/__pycache__/modeling_minimax.cpython-313.pyc,, +transformers/models/minimax/__pycache__/modular_minimax.cpython-313.pyc,, +transformers/models/minimax/configuration_minimax.py,sha256=aNAEtvqIgY0x2PC0ZxFF6Rv1-HA4ZrhY0cMXeptYR5A,11833 +transformers/models/minimax/modeling_minimax.py,sha256=Zw6bCmdtHzZwFmPgjZ89dy5xH_GFs9_1VY4NwQW0vn8,42045 +transformers/models/minimax/modular_minimax.py,sha256=QiSbxlL2E55nDwG5YPmBli56XyniMfIJbNYbbjzJ-Q8,27729 +transformers/models/ministral/__init__.py,sha256=618E2UYWKhHRaJQnG5YToNFqqiEPQBnYNlAVyZq889c,1019 +transformers/models/ministral/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ministral/__pycache__/configuration_ministral.cpython-313.pyc,, +transformers/models/ministral/__pycache__/modeling_ministral.cpython-313.pyc,, +transformers/models/ministral/__pycache__/modular_ministral.cpython-313.pyc,, +transformers/models/ministral/configuration_ministral.py,sha256=TviU9iKkveoR8VFyn7B_4qws7jM40VI3hE0ZUsHwg8E,8227 +transformers/models/ministral/modeling_ministral.py,sha256=YFlKtqyTmZmrd-V5ENZF3Es5Xoc-4w8PN_BUNahLZ6M,21594 +transformers/models/ministral/modular_ministral.py,sha256=cN92-yup-GjKbSdgggEgTCCB4wR-nTVCqx2SPzp0nYw,11864 +transformers/models/mistral/__init__.py,sha256=PDX9s8k0BrsBlmNShhdijHKAp6zC3QYBUwgl1Dx9EsM,1095 +transformers/models/mistral/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mistral/__pycache__/configuration_mistral.cpython-313.pyc,, +transformers/models/mistral/__pycache__/modeling_flax_mistral.cpython-313.pyc,, +transformers/models/mistral/__pycache__/modeling_mistral.cpython-313.pyc,, 
+transformers/models/mistral/__pycache__/modeling_tf_mistral.cpython-313.pyc,, +transformers/models/mistral/__pycache__/modular_mistral.cpython-313.pyc,, +transformers/models/mistral/configuration_mistral.py,sha256=vpsbjAdonF4w2vMU6I1O_GZIOPpcNp9-PEtIV0pHJ4I,8001 +transformers/models/mistral/modeling_flax_mistral.py,sha256=JVQ590d9nxhPZ2FhmsmOE1316MSrKCmapoDRt8EWZYI,31805 +transformers/models/mistral/modeling_mistral.py,sha256=aKrQAPKn1Oo8P8-L6w8pIY-HoYjLz1kRFHHP66S49l0,20745 +transformers/models/mistral/modeling_tf_mistral.py,sha256=_n7kgEEYJkaK7EhpDZQ2v8RdjDZLV7ODikhbjGnJQ-4,44357 +transformers/models/mistral/modular_mistral.py,sha256=l8iBjAJ5bFWLL1pkh0iDjpSIdUhBS0VntVKG3QEHlPk,7659 +transformers/models/mistral3/__init__.py,sha256=ccR4AQqjFkPl8JVYyVmVvbVm618FlOw4cpwT7N-8ZD4,1036 +transformers/models/mistral3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mistral3/__pycache__/configuration_mistral3.cpython-313.pyc,, +transformers/models/mistral3/__pycache__/modeling_mistral3.cpython-313.pyc,, +transformers/models/mistral3/__pycache__/modular_mistral3.cpython-313.pyc,, +transformers/models/mistral3/configuration_mistral3.py,sha256=__X0uaVt64fl_wrOlXLiMbg7RUMj0Palwyp44-zdAwc,5710 +transformers/models/mistral3/modeling_mistral3.py,sha256=iJuwZmxJdNxuyaB4uJ7Ko7hPB2IJLaCXxbxyH7nuP94,23442 +transformers/models/mistral3/modular_mistral3.py,sha256=GMNBdHR-J0d5fUc1kW4fVXzGTPui00WOoMAaPxodx8w,14388 +transformers/models/mixtral/__init__.py,sha256=_i66uHDx5A0-UBwgR2nwibxSf0ZePqpTa_Qsm0Cg_Bs,1015 +transformers/models/mixtral/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mixtral/__pycache__/configuration_mixtral.cpython-313.pyc,, +transformers/models/mixtral/__pycache__/modeling_mixtral.cpython-313.pyc,, +transformers/models/mixtral/__pycache__/modular_mixtral.cpython-313.pyc,, +transformers/models/mixtral/configuration_mixtral.py,sha256=7OMbg8pfaU0WfYnGIIqq5nDzx98x8NPKi-NOb3oSAdk,9073 +transformers/models/mixtral/modeling_mixtral.py,sha256=phbCrLlAySzv5Mci91hY1NrVuir4mrCoYEBOiFJ-BB4,30282 +transformers/models/mixtral/modular_mixtral.py,sha256=cNEtBX6GS1GIn4r1S3kNqy-PM2SebtwYUSEtoFZg71Q,18953 +transformers/models/mlcd/__init__.py,sha256=hLiLB1E0jT7sI3s8TraLb_Z1WOpwS69zac5kyHNfx4E,989 +transformers/models/mlcd/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mlcd/__pycache__/configuration_mlcd.cpython-313.pyc,, +transformers/models/mlcd/__pycache__/modeling_mlcd.cpython-313.pyc,, +transformers/models/mlcd/__pycache__/modular_mlcd.cpython-313.pyc,, +transformers/models/mlcd/configuration_mlcd.py,sha256=8WPScG0ONO9SbjrfdhSZtxIGCR5NNOl21sleN6Q4hQI,5805 +transformers/models/mlcd/modeling_mlcd.py,sha256=1HdAYVTP77B7_ucDw2FFupJzE1IktTRSPXYTp1BFumg,27365 +transformers/models/mlcd/modular_mlcd.py,sha256=oasTUnxnhqD37aoliw5Xg5K9w06IJPndIhM1B3zFcZw,23393 +transformers/models/mllama/__init__.py,sha256=2lTGCiL6EZirXNcu4aKV7vSmv50iRsQnCV-c9sahNXg,1073 +transformers/models/mllama/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mllama/__pycache__/configuration_mllama.cpython-313.pyc,, +transformers/models/mllama/__pycache__/image_processing_mllama.cpython-313.pyc,, +transformers/models/mllama/__pycache__/modeling_mllama.cpython-313.pyc,, +transformers/models/mllama/__pycache__/processing_mllama.cpython-313.pyc,, +transformers/models/mllama/configuration_mllama.py,sha256=xNSwdFPE4V0MAKsiCljoK3UFekwLvRvMdSrNi_L5qZ0,18209 +transformers/models/mllama/image_processing_mllama.py,sha256=CJzTqSoe9n3tJnvo7XqPjVxjtQWE1zoxAMNcoH0HD58,38258 
+transformers/models/mllama/modeling_mllama.py,sha256=qCiF5bMbCfSDXX7IbG2ZGocaaGooYBBdk78CDtjISuw,79050 +transformers/models/mllama/processing_mllama.py,sha256=oa4t3yHIMoIadyRmlgHCxgy156NzAYHQutkWCHmw0RM,17653 +transformers/models/mluke/__init__.py,sha256=e_3cNftWOmhNXk-zsA1-2DOBT9L56SHr-6qev0xI7Ws,956 +transformers/models/mluke/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mluke/__pycache__/tokenization_mluke.cpython-313.pyc,, +transformers/models/mluke/tokenization_mluke.py,sha256=PpWmJFXmwp40fIQvDN_m7Xo4LJNtWNAt6x5JH6Wm0us,82153 +transformers/models/mm_grounding_dino/__init__.py,sha256=mk2hUY_rZw6JSjfkqSL4hVYJKxh5ViM3TZfib-09kpc,1015 +transformers/models/mm_grounding_dino/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mm_grounding_dino/__pycache__/configuration_mm_grounding_dino.cpython-313.pyc,, +transformers/models/mm_grounding_dino/__pycache__/modeling_mm_grounding_dino.cpython-313.pyc,, +transformers/models/mm_grounding_dino/__pycache__/modular_mm_grounding_dino.cpython-313.pyc,, +transformers/models/mm_grounding_dino/configuration_mm_grounding_dino.py,sha256=1r6ZcJHNxQdIxGK_PYsvhfXVAmThPIVvIgSEL99T4KE,15282 +transformers/models/mm_grounding_dino/modeling_mm_grounding_dino.py,sha256=mxH878CgFMRV8Vc9M2SdGn821z0ZpHj9mRsXa7z4D_Y,129461 +transformers/models/mm_grounding_dino/modular_mm_grounding_dino.py,sha256=Njd8tx8Mg6YQwOCXNdNJeSQD3jhq5QsQtsrMLcPC2HI,19389 +transformers/models/mobilebert/__init__.py,sha256=Jy7IZ2oQAjyE_KOoT-I7Z9bqPRVLfsOwx8XY3Y43RFc,1134 +transformers/models/mobilebert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mobilebert/__pycache__/configuration_mobilebert.cpython-313.pyc,, +transformers/models/mobilebert/__pycache__/modeling_mobilebert.cpython-313.pyc,, +transformers/models/mobilebert/__pycache__/modeling_tf_mobilebert.cpython-313.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert.cpython-313.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert_fast.cpython-313.pyc,, +transformers/models/mobilebert/configuration_mobilebert.py,sha256=kSjUZXRAtgvEjp4C2pxC8Po5MS6rM4i4v_xAzvqqHVk,8283 +transformers/models/mobilebert/modeling_mobilebert.py,sha256=icrVzODm-Ep83hkqQzFqygFEKKLpB5yxltVhi1BXc28,63164 +transformers/models/mobilebert/modeling_tf_mobilebert.py,sha256=kTa_oE6_AnYfGoWefuzHT1PXzB7GLMwVmkwocK3kDNw,83932 +transformers/models/mobilebert/tokenization_mobilebert.py,sha256=u3_ex8yv14oHz90QsfBjkELZaDwQDNicgNFgXDgtkvU,20149 +transformers/models/mobilebert/tokenization_mobilebert_fast.py,sha256=_cLnh_Vn7JHMg973Q4kjdkO35VIpNOW9UQt9OIGAvok,6703 +transformers/models/mobilenet_v1/__init__.py,sha256=kS0kf8Q0rDhNcqIJM6iI6iVufztYwEl-TsOzVQZwn-Y,1159 +transformers/models/mobilenet_v1/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mobilenet_v1/__pycache__/configuration_mobilenet_v1.cpython-313.pyc,, +transformers/models/mobilenet_v1/__pycache__/feature_extraction_mobilenet_v1.cpython-313.pyc,, +transformers/models/mobilenet_v1/__pycache__/image_processing_mobilenet_v1.cpython-313.pyc,, +transformers/models/mobilenet_v1/__pycache__/image_processing_mobilenet_v1_fast.cpython-313.pyc,, +transformers/models/mobilenet_v1/__pycache__/modeling_mobilenet_v1.cpython-313.pyc,, +transformers/models/mobilenet_v1/configuration_mobilenet_v1.py,sha256=aR3QacEyWEphbgo_mcEvHS7NOVOwxQUhDvoWziA-q54,4939 +transformers/models/mobilenet_v1/feature_extraction_mobilenet_v1.py,sha256=Yydhc-fHuAzWeUOk7qrLrrs-HzMeaLE_IWeploaoOQc,1341 
+transformers/models/mobilenet_v1/image_processing_mobilenet_v1.py,sha256=2QhRLV0yySXPOqwnZWoroOXwWuJO0G367igwHcD_YBE,15403 +transformers/models/mobilenet_v1/image_processing_mobilenet_v1_fast.py,sha256=RQI4Ujpx8yV9YFFvXzZEVZxqiZrQ97JSHnP_640V6Bs,1498 +transformers/models/mobilenet_v1/modeling_mobilenet_v1.py,sha256=EdpkzqoKflLLd57I4IiBO4f8l0t1hY-5kP5FkB275v0,15263 +transformers/models/mobilenet_v2/__init__.py,sha256=PGEnF5QRb3bZg_Iux0DuD7VPysu8nA0WxJEd6YOXtmw,1159 +transformers/models/mobilenet_v2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mobilenet_v2/__pycache__/configuration_mobilenet_v2.cpython-313.pyc,, +transformers/models/mobilenet_v2/__pycache__/feature_extraction_mobilenet_v2.cpython-313.pyc,, +transformers/models/mobilenet_v2/__pycache__/image_processing_mobilenet_v2.cpython-313.pyc,, +transformers/models/mobilenet_v2/__pycache__/image_processing_mobilenet_v2_fast.cpython-313.pyc,, +transformers/models/mobilenet_v2/__pycache__/modeling_mobilenet_v2.cpython-313.pyc,, +transformers/models/mobilenet_v2/configuration_mobilenet_v2.py,sha256=kEuy8U-ShxevXr2lGluqA4BRAq_3-UsWN4YutMm1yoc,6835 +transformers/models/mobilenet_v2/feature_extraction_mobilenet_v2.py,sha256=aO5lNZnnoRPfhoBEiDiLwuctZkGlFsX1Io-u167A7QU,1341 +transformers/models/mobilenet_v2/image_processing_mobilenet_v2.py,sha256=bdWTBTRZNIxkbjUpDdTYuX0cMbfoEVxRdFtZGIRCluw,24792 +transformers/models/mobilenet_v2/image_processing_mobilenet_v2_fast.py,sha256=KMUcJrCCXbjURFjjxAaLW43up6UAuiJ8P9L-OT6So7g,9722 +transformers/models/mobilenet_v2/modeling_mobilenet_v2.py,sha256=TzKMWwVX0WPcMjtthx5rVHj4ThUTj4zWSsAag6HQ8NQ,30987 +transformers/models/mobilevit/__init__.py,sha256=-pqcwwjQsaYPNEPlRiS9B06Rl9kZf4b8yWGWtW3d4K0,1185 +transformers/models/mobilevit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mobilevit/__pycache__/configuration_mobilevit.cpython-313.pyc,, +transformers/models/mobilevit/__pycache__/feature_extraction_mobilevit.cpython-313.pyc,, +transformers/models/mobilevit/__pycache__/image_processing_mobilevit.cpython-313.pyc,, +transformers/models/mobilevit/__pycache__/image_processing_mobilevit_fast.cpython-313.pyc,, +transformers/models/mobilevit/__pycache__/modeling_mobilevit.cpython-313.pyc,, +transformers/models/mobilevit/__pycache__/modeling_tf_mobilevit.cpython-313.pyc,, +transformers/models/mobilevit/configuration_mobilevit.py,sha256=tyQANkpBRv8MHvXm8nYGlMI_5gQJQekS25pQTQcbfPw,7596 +transformers/models/mobilevit/feature_extraction_mobilevit.py,sha256=rS3UvVaXJwUDc7ZsVoi33DAvQewGdnC4SOgqdxISEwk,1324 +transformers/models/mobilevit/image_processing_mobilevit.py,sha256=C-YObQ84n3UDfbnwFUAJl5rtRR9JrTRQHtOK230Uypg,23559 +transformers/models/mobilevit/image_processing_mobilevit_fast.py,sha256=9bkNGORXwIWH07GlgR7YYsfv31MTennEe45n_5CdZA8,9879 +transformers/models/mobilevit/modeling_mobilevit.py,sha256=-1iHPTcto0YydQYc5ojWtnYPxaXrR3AJhiZJUdDKARg,36742 +transformers/models/mobilevit/modeling_tf_mobilevit.py,sha256=VW-iXvdwlgflYrmmW_tfgb_QKE4ASB1I_5cbrjEvQUM,54753 +transformers/models/mobilevitv2/__init__.py,sha256=pAGk_9X22yOYvlcwbqTc4nm6fL4rPhAhDpdBguna5Q0,1003 +transformers/models/mobilevitv2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mobilevitv2/__pycache__/configuration_mobilevitv2.cpython-313.pyc,, +transformers/models/mobilevitv2/__pycache__/modeling_mobilevitv2.cpython-313.pyc,, +transformers/models/mobilevitv2/configuration_mobilevitv2.py,sha256=2mQCHZ8tq2bfrswTfb1fnottcJfY3p_g_vLoWXjkmBE,7159 
+transformers/models/mobilevitv2/modeling_mobilevitv2.py,sha256=YgyCwMybqDGiDEoJ_jsuG2OMaRbY4lSD5VdY3LxNJ5s,34491 +transformers/models/modernbert/__init__.py,sha256=BEQFRFfcKvUlphA1ibW3s34Vkbm-MUuyqzaLbrIFiAA,1006 +transformers/models/modernbert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/modernbert/__pycache__/configuration_modernbert.cpython-313.pyc,, +transformers/models/modernbert/__pycache__/modeling_modernbert.cpython-313.pyc,, +transformers/models/modernbert/__pycache__/modular_modernbert.cpython-313.pyc,, +transformers/models/modernbert/configuration_modernbert.py,sha256=HQK0ucX7it6g0ilY3vHTUyRupzHOz50KpLNyNWsP92g,11493 +transformers/models/modernbert/modeling_modernbert.py,sha256=GyPbL1nT-AvZr4n7bfPOFa_9kLX_wJkl5zJFDmlIaP8,67793 +transformers/models/modernbert/modular_modernbert.py,sha256=RsZTbedS0GveVB7gUj2Y5Nhy_7bHofHOMSnmsVl0fRw,73478 +transformers/models/modernbert_decoder/__init__.py,sha256=RjLebVKPcGgNEORY5xypTPd8oYWcnFmbGm3hBqQX-HE,1022 +transformers/models/modernbert_decoder/__pycache__/__init__.cpython-313.pyc,, +transformers/models/modernbert_decoder/__pycache__/configuration_modernbert_decoder.cpython-313.pyc,, +transformers/models/modernbert_decoder/__pycache__/modeling_modernbert_decoder.cpython-313.pyc,, +transformers/models/modernbert_decoder/__pycache__/modular_modernbert_decoder.cpython-313.pyc,, +transformers/models/modernbert_decoder/configuration_modernbert_decoder.py,sha256=08paqhKhXbw-sTasdy5vNsujOFc5wGVCyxWJmtAqkHk,10629 +transformers/models/modernbert_decoder/modeling_modernbert_decoder.py,sha256=2EbZd3bp8AuDVsOr2ME8hIyRnVyEQs4r5Y_sYtNvu_8,32358 +transformers/models/modernbert_decoder/modular_modernbert_decoder.py,sha256=CSWjJEJC9-8mwDG28ylp_UdL9LjJZTstPhKJBSzTnOo,34856 +transformers/models/moonshine/__init__.py,sha256=eBgvc9LtoDnB6HnNvrObDWL3h_L4Sgn5-D-hepNfAmI,999 +transformers/models/moonshine/__pycache__/__init__.cpython-313.pyc,, +transformers/models/moonshine/__pycache__/configuration_moonshine.cpython-313.pyc,, +transformers/models/moonshine/__pycache__/modeling_moonshine.cpython-313.pyc,, +transformers/models/moonshine/__pycache__/modular_moonshine.cpython-313.pyc,, +transformers/models/moonshine/configuration_moonshine.py,sha256=FCiqs9BayGl3oUGFh4FIAFz-3eZKlpqQOGusZvhFBZ8,13524 +transformers/models/moonshine/modeling_moonshine.py,sha256=pYrG8rK3k3WQLmm64DPC5XJwwRsWoxFkIzbMacxi-I8,49058 +transformers/models/moonshine/modular_moonshine.py,sha256=TjYULI0gcQr7ZEi5o3uRV_JCRuiUeQqX_A1tfQKLfqc,43688 +transformers/models/moshi/__init__.py,sha256=uW4oqTKZdbmURZaC_xwwHXnYEMyLJrMEJAlfbUzSWO8,991 +transformers/models/moshi/__pycache__/__init__.cpython-313.pyc,, +transformers/models/moshi/__pycache__/configuration_moshi.cpython-313.pyc,, +transformers/models/moshi/__pycache__/modeling_moshi.cpython-313.pyc,, +transformers/models/moshi/configuration_moshi.py,sha256=zVqARdo5wQHERpun-Z1f1mw1_ddLTn0fQsvs2SjE5J8,16104 +transformers/models/moshi/modeling_moshi.py,sha256=S0HaI322DCLsG8HTYllVAlh73QyYnWtzwwesCaLYnyE,123767 +transformers/models/mpnet/__init__.py,sha256=agt4uraqHTtlIphsDB17XVAPzCKHaPBKlVaQkKHxRyM,1109 +transformers/models/mpnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mpnet/__pycache__/configuration_mpnet.cpython-313.pyc,, +transformers/models/mpnet/__pycache__/modeling_mpnet.cpython-313.pyc,, +transformers/models/mpnet/__pycache__/modeling_tf_mpnet.cpython-313.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet.cpython-313.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet_fast.cpython-313.pyc,, 
+transformers/models/mpnet/configuration_mpnet.py,sha256=DsCgTVE6hDGcaVxd2yqEPj7Ph-JLE2nPyt1AJlVZkx4,5327 +transformers/models/mpnet/modeling_mpnet.py,sha256=3SI7QLERArtBqN0rRbsTMaskn_GynI-8V7DGSnob9xA,37898 +transformers/models/mpnet/modeling_tf_mpnet.py,sha256=tIjgRjIel676orRDAYK6oQ1YnPsPMY7rlCD0cMBdc_Y,55535 +transformers/models/mpnet/tokenization_mpnet.py,sha256=NNYv8Zwj-6RFWQ7Rynjpe1oIvEZSOBl209DlaBju-Ro,22442 +transformers/models/mpnet/tokenization_mpnet_fast.py,sha256=2j7lcdtgYzsQy52RV-dTbGUwgczxY5qO6siJauxsnuY,9180 +transformers/models/mpt/__init__.py,sha256=DAIIAY0kPL-bXMkPUvxmP97HCXPi-SoM3NLnlJJYarg,987 +transformers/models/mpt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mpt/__pycache__/configuration_mpt.cpython-313.pyc,, +transformers/models/mpt/__pycache__/modeling_mpt.cpython-313.pyc,, +transformers/models/mpt/configuration_mpt.py,sha256=S-Ah1uArFpNIGv97PC0mNcW_JBvM5I83TRjM61KjNZ0,10499 +transformers/models/mpt/modeling_mpt.py,sha256=tqttIJvVRzR8enDLj0KtVSX4ZHxm2kk9a312LZZYy1g,35426 +transformers/models/mra/__init__.py,sha256=51mnm4DFq6aWxOsmaaVZDL28QozNauXyTtbEihDxUQU,987 +transformers/models/mra/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mra/__pycache__/configuration_mra.cpython-313.pyc,, +transformers/models/mra/__pycache__/modeling_mra.cpython-313.pyc,, +transformers/models/mra/configuration_mra.py,sha256=oNhRz6PdvUK_ugoiAhHDuNkGgBNyDguATgQdKeTJBnY,6536 +transformers/models/mra/modeling_mra.py,sha256=bcVhUeWeW7dP6M6heiSwSMrzqXzXOQKfGS7wNwpxqh4,57289 +transformers/models/mt5/__init__.py,sha256=UK8vGX9r6fPdzPaJKCbGJ7RCqKOdIo-7H9V-Qp8rwEg,1095 +transformers/models/mt5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mt5/__pycache__/configuration_mt5.cpython-313.pyc,, +transformers/models/mt5/__pycache__/modeling_flax_mt5.cpython-313.pyc,, +transformers/models/mt5/__pycache__/modeling_mt5.cpython-313.pyc,, +transformers/models/mt5/__pycache__/modeling_tf_mt5.cpython-313.pyc,, +transformers/models/mt5/__pycache__/tokenization_mt5.cpython-313.pyc,, +transformers/models/mt5/__pycache__/tokenization_mt5_fast.cpython-313.pyc,, +transformers/models/mt5/configuration_mt5.py,sha256=oXTmtVxXx5i-S70WEcmaZo_kI1CUKCaxA7aeos5iX7k,8011 +transformers/models/mt5/modeling_flax_mt5.py,sha256=9WjlLB_EV9WDiy-rBxzVUPocsHrv02cEa4OB8lVR6EA,4329 +transformers/models/mt5/modeling_mt5.py,sha256=DZLh3cjsngyRoG1X6YHjtWmviHi7oTcBJuIhQe0Gve4,113688 +transformers/models/mt5/modeling_tf_mt5.py,sha256=EIUkWvuApAbiaX6qhveT1KC43s_NDmQazLrbYT45aao,3406 +transformers/models/mt5/tokenization_mt5.py,sha256=AckaXSw5OojOGLezMhrsv2a9BMZXwzhy5IsT3hvp_Q8,746 +transformers/models/mt5/tokenization_mt5_fast.py,sha256=1npEFH_c4nDQxOFNoqcGNW30KCWe04BpLrrv7aDcDQ8,762 +transformers/models/musicgen/__init__.py,sha256=iwtW9pg6iDe5D2dWVC4IRU8QbNmRK5kMqPCM8fsUSgo,1036 +transformers/models/musicgen/__pycache__/__init__.cpython-313.pyc,, +transformers/models/musicgen/__pycache__/configuration_musicgen.cpython-313.pyc,, +transformers/models/musicgen/__pycache__/modeling_musicgen.cpython-313.pyc,, +transformers/models/musicgen/__pycache__/processing_musicgen.cpython-313.pyc,, +transformers/models/musicgen/configuration_musicgen.py,sha256=YpsrhRaEqRAOYYlkx5uyelz5iq2dwvU8_mkknRIN9AM,10959 +transformers/models/musicgen/modeling_musicgen.py,sha256=SYb95MPeR2euWCbqhILTLXN0uTxE13EupONfT4ECS3w,118192 +transformers/models/musicgen/processing_musicgen.py,sha256=wmNkDWi3myrHNBldIVy6ikOdz9nRJKeqUSimsEw_23U,4591 
+transformers/models/musicgen_melody/__init__.py,sha256=WVEsVs7g0XlpO_yd1X0X4QnMjhG0h_n6T41FpdJcnS8,1011 +transformers/models/musicgen_melody/__pycache__/__init__.cpython-313.pyc,, +transformers/models/musicgen_melody/__pycache__/configuration_musicgen_melody.cpython-313.pyc,, +transformers/models/musicgen_melody/__pycache__/feature_extraction_musicgen_melody.cpython-313.pyc,, +transformers/models/musicgen_melody/__pycache__/modeling_musicgen_melody.cpython-313.pyc,, +transformers/models/musicgen_melody/__pycache__/processing_musicgen_melody.cpython-313.pyc,, +transformers/models/musicgen_melody/configuration_musicgen_melody.py,sha256=wlnZBqJQ0XElTRvI2zPc1PGLarzMsWBQaHKYmkensmk,12016 +transformers/models/musicgen_melody/feature_extraction_musicgen_melody.py,sha256=onUVyD4VSztogKli3SlXeuhd6cNn5EnH6PSjx6Lj36Y,15359 +transformers/models/musicgen_melody/modeling_musicgen_melody.py,sha256=wU_GuVDCdGR-ju824mDJFr1DW12Rxd5HNgviT-t8qmM,111404 +transformers/models/musicgen_melody/processing_musicgen_melody.py,sha256=gUo8sWpKuyZTneOnx5r_HPw7QE8Ed3Togmz7g8fGJBs,6085 +transformers/models/mvp/__init__.py,sha256=0e0-wP4EkfzPiO_BlHlmyVUEq-1kb9RHY2Ikbk66W7s,1064 +transformers/models/mvp/__pycache__/__init__.cpython-313.pyc,, +transformers/models/mvp/__pycache__/configuration_mvp.cpython-313.pyc,, +transformers/models/mvp/__pycache__/modeling_mvp.cpython-313.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp.cpython-313.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp_fast.cpython-313.pyc,, +transformers/models/mvp/configuration_mvp.py,sha256=Ah_EG0nItOD3_y_WDad6cnCcgGY2TXWuJzxuLJQ6fq4,8451 +transformers/models/mvp/modeling_mvp.py,sha256=xWrWpErKHzXD1z57_52NKB3ht1TOUCmlugUhPOFl-P8,82194 +transformers/models/mvp/tokenization_mvp.py,sha256=7Q1V8hHo1hd9RCKu-A1lv2WNAcTbxhBidKNPHOLjwyc,16206 +transformers/models/mvp/tokenization_mvp_fast.py,sha256=xQrPiI91_CG7II-gQEJs7rtTSMRbKNARc185MOa5JQs,11819 +transformers/models/myt5/__init__.py,sha256=MFQX-RuvZujGb_twBWBQpTt4NZq6FxreEysWmF2fFGI,955 +transformers/models/myt5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/myt5/__pycache__/tokenization_myt5.cpython-313.pyc,, +transformers/models/myt5/tokenization_myt5.py,sha256=GdQXAMMCaCIbfJI-Hry-9myIFXO2TLpoO6D8VC1HpAs,15535 +transformers/models/nemotron/__init__.py,sha256=ZwaMH1AQ0VIuFnouYe0Sx0HcCGA7PaCp3-_yw3xjeQA,997 +transformers/models/nemotron/__pycache__/__init__.cpython-313.pyc,, +transformers/models/nemotron/__pycache__/configuration_nemotron.cpython-313.pyc,, +transformers/models/nemotron/__pycache__/modeling_nemotron.cpython-313.pyc,, +transformers/models/nemotron/configuration_nemotron.py,sha256=QMH_Mw48ZgCvovfE3MtUM6W_34DVUb7unpr27JaVTIg,7399 +transformers/models/nemotron/modeling_nemotron.py,sha256=p9LxqWzWN6axZVNfiHtnLkIw1TkU9tksiX_kgwXfmh4,43986 +transformers/models/nllb/__init__.py,sha256=MLFrxhOJ3xvOAcRulvCEMoKsajLuudllZLMrYDYQOas,997 +transformers/models/nllb/__pycache__/__init__.cpython-313.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb.cpython-313.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb_fast.cpython-313.pyc,, +transformers/models/nllb/tokenization_nllb.py,sha256=nHIf5mrI6gDEugszKVOwpB3mWPE5a1KcBFK2qRTrg-E,19158 +transformers/models/nllb/tokenization_nllb_fast.py,sha256=uT2QAtg_upIEJ9W6lKkDi7EDHOqrB08oUH6OTGNagm4,15822 +transformers/models/nllb_moe/__init__.py,sha256=sAfoAnhHK_reU1a2WUoF1rFtPBckeGGrzJCD8gUv54A,997 +transformers/models/nllb_moe/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/nllb_moe/__pycache__/configuration_nllb_moe.cpython-313.pyc,, +transformers/models/nllb_moe/__pycache__/modeling_nllb_moe.cpython-313.pyc,, +transformers/models/nllb_moe/configuration_nllb_moe.py,sha256=jHKoRpGbrlNEcBiOk3b2fPOo1m6OD7Tx11F9r8SSd1Y,11222 +transformers/models/nllb_moe/modeling_nllb_moe.py,sha256=kESo7r71_BREOivYAb-OBSqmBa7AYNAkUnTcADyVCLw,82111 +transformers/models/nougat/__init__.py,sha256=sFYK9O1tIETKks9tQ5d6X3gGWAFfTXNge06ZdnnKV9s,1090 +transformers/models/nougat/__pycache__/__init__.cpython-313.pyc,, +transformers/models/nougat/__pycache__/image_processing_nougat.cpython-313.pyc,, +transformers/models/nougat/__pycache__/image_processing_nougat_fast.cpython-313.pyc,, +transformers/models/nougat/__pycache__/processing_nougat.cpython-313.pyc,, +transformers/models/nougat/__pycache__/tokenization_nougat_fast.cpython-313.pyc,, +transformers/models/nougat/image_processing_nougat.py,sha256=tOeyzKQYt9uaENoWNdHCfyqi5l_AHqRpJqMnKt7wL_A,24225 +transformers/models/nougat/image_processing_nougat_fast.py,sha256=uZRGpPlOrwmWS7am2kBIc2ytCEfAUAlXD6AIksQpHWE,11315 +transformers/models/nougat/processing_nougat.py,sha256=1gOCgHjtvBLDNF82ckjBAgpNjIlewP1y_vaMKhkdcac,6255 +transformers/models/nougat/tokenization_nougat_fast.py,sha256=Z0T6qwb5Frgn_QCDRMectgQbKaxnt943q8QOFjenC40,24560 +transformers/models/nystromformer/__init__.py,sha256=CwEg6m4nJW_AfNDws_MIv1O1x5IO3xPp-FYqirlFXwk,1007 +transformers/models/nystromformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/nystromformer/__pycache__/configuration_nystromformer.cpython-313.pyc,, +transformers/models/nystromformer/__pycache__/modeling_nystromformer.cpython-313.pyc,, +transformers/models/nystromformer/configuration_nystromformer.py,sha256=UyLmPF2li3_ADTz9tS1h5t4CDY5d5GzsfeC9hG42RzI,6402 +transformers/models/nystromformer/modeling_nystromformer.py,sha256=bHXMtBDSPg8BtHRf_kP8R46evgvKQjiYkDtUvZ-8PYE,43471 +transformers/models/olmo/__init__.py,sha256=x9u_5vqI52-uBuj89-6aYucGDlvBUEPSOhPLLB1asok,1009 +transformers/models/olmo/__pycache__/__init__.cpython-313.pyc,, +transformers/models/olmo/__pycache__/configuration_olmo.cpython-313.pyc,, +transformers/models/olmo/__pycache__/modeling_olmo.cpython-313.pyc,, +transformers/models/olmo/__pycache__/modular_olmo.cpython-313.pyc,, +transformers/models/olmo/configuration_olmo.py,sha256=s_YotovUtp-VmB7q9RV4B1TKgjhp3gE6Ucqg72I3GJc,9423 +transformers/models/olmo/modeling_olmo.py,sha256=ZPvSpIp_35WKiHTFL9-pdbVfJsaHRvkP7VutXsGWSJM,19888 +transformers/models/olmo/modular_olmo.py,sha256=UTS-10TNLe_ZDV2qVLIw6y03ub4IY3j602iXn2UupQc,7102 +transformers/models/olmo2/__init__.py,sha256=Frt9nEMsfPszod1lkFTAJUobU50IjOFlqI6uJkuQVcY,1011 +transformers/models/olmo2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/olmo2/__pycache__/configuration_olmo2.cpython-313.pyc,, +transformers/models/olmo2/__pycache__/modeling_olmo2.cpython-313.pyc,, +transformers/models/olmo2/__pycache__/modular_olmo2.cpython-313.pyc,, +transformers/models/olmo2/configuration_olmo2.py,sha256=kqBjnAs0k-pZ-Y8zNMRWQ-6gav-YpOpg6oy9LZ-AhpE,9439 +transformers/models/olmo2/modeling_olmo2.py,sha256=sTIas1rDyOhJdnwUewlvU8LQ3GGzwWp-2Nyism4xPBQ,20317 +transformers/models/olmo2/modular_olmo2.py,sha256=9O8WahU9g_bYicjUvuzxdT6sxdZ7IWt-8UVDLw7dffY,14200 +transformers/models/olmo3/__init__.py,sha256=l35_AKxpPmjrZIpeDw2nAA_BT59faflVjx3mXAVx9ro,1007 +transformers/models/olmo3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/olmo3/__pycache__/configuration_olmo3.cpython-313.pyc,, 
+transformers/models/olmo3/__pycache__/modeling_olmo3.cpython-313.pyc,, +transformers/models/olmo3/__pycache__/modular_olmo3.cpython-313.pyc,, +transformers/models/olmo3/configuration_olmo3.py,sha256=-yRbWIsHxwTEd0CqyVMapJDcWKCRoQwTZrAUmC3oJX8,12229 +transformers/models/olmo3/modeling_olmo3.py,sha256=5rKOQ-H3_wYfbPEHBx3Ry2KmGiT3AfyBghf-6vL57T8,22266 +transformers/models/olmo3/modular_olmo3.py,sha256=BHe0QjEPezYJS-_5AhixHdqo8l6w0q2yf1997VuvtQY,19666 +transformers/models/olmoe/__init__.py,sha256=eQ6mx9aBIcA4RiK3p7dbqORokkuMfQNRss06E8uWNrk,991 +transformers/models/olmoe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/olmoe/__pycache__/configuration_olmoe.cpython-313.pyc,, +transformers/models/olmoe/__pycache__/modeling_olmoe.cpython-313.pyc,, +transformers/models/olmoe/configuration_olmoe.py,sha256=_NEJ3nvuHqEOJND_jWjpqfV-mR7-CB9lUnVPoDmhCp4,9069 +transformers/models/olmoe/modeling_olmoe.py,sha256=JIcXqEd_vLSxbOZIqazoKbay9KACGRp_ZO5yDrZfTA0,51495 +transformers/models/omdet_turbo/__init__.py,sha256=XIckpuo9tkT7NB5uTs9wLdpxr9GDedQPVJL2P8XU-7Q,1045 +transformers/models/omdet_turbo/__pycache__/__init__.cpython-313.pyc,, +transformers/models/omdet_turbo/__pycache__/configuration_omdet_turbo.cpython-313.pyc,, +transformers/models/omdet_turbo/__pycache__/modeling_omdet_turbo.cpython-313.pyc,, +transformers/models/omdet_turbo/__pycache__/processing_omdet_turbo.cpython-313.pyc,, +transformers/models/omdet_turbo/configuration_omdet_turbo.py,sha256=2XMjtVGwInCV4GpQW-FHqsvXCoce1rsiBMT9j2BXevo,14933 +transformers/models/omdet_turbo/modeling_omdet_turbo.py,sha256=-FNbrykecMvG2PmYHGvshaidc9sePisxL7semsnaLRE,74062 +transformers/models/omdet_turbo/processing_omdet_turbo.py,sha256=lh2Pk0x04IXmSE8TcvnmP-FRoP48T2Ev39u64OeMocY,16871 +transformers/models/oneformer/__init__.py,sha256=MrdVp7ZBJOVbWMpwojOPnEzDDW2HSqs5oSZG81jdCQI,1136 +transformers/models/oneformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/oneformer/__pycache__/configuration_oneformer.cpython-313.pyc,, +transformers/models/oneformer/__pycache__/image_processing_oneformer.cpython-313.pyc,, +transformers/models/oneformer/__pycache__/image_processing_oneformer_fast.cpython-313.pyc,, +transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-313.pyc,, +transformers/models/oneformer/__pycache__/processing_oneformer.cpython-313.pyc,, +transformers/models/oneformer/configuration_oneformer.py,sha256=GeVa40j8P2XaKbU58JWE9C2EYXNhgUJjY13kUzXliMU,13678 +transformers/models/oneformer/image_processing_oneformer.py,sha256=ahDeXLk5CA-nXd32iTPy5otK7i27xxJNp_HWKTrcxuA,60667 +transformers/models/oneformer/image_processing_oneformer_fast.py,sha256=sBclZOyjttl_lbchZ_aO90q_v3DrTv5g8sLV3ShNJUU,41187 +transformers/models/oneformer/modeling_oneformer.py,sha256=hCDHd4bwMUCdKms9u96h17GMAzhr0B4PWnm-n_ZgZ7Q,139862 +transformers/models/oneformer/processing_oneformer.py,sha256=dUSYgtezsucPHd9xJB5p2Tad6R-AI2h_rFe0MiM-9FI,9179 +transformers/models/openai/__init__.py,sha256=q0fAl8ajoJyknHe5A3ZHuHH3zww8xdupt_j49lIaObY,1114 +transformers/models/openai/__pycache__/__init__.cpython-313.pyc,, +transformers/models/openai/__pycache__/configuration_openai.cpython-313.pyc,, +transformers/models/openai/__pycache__/modeling_openai.cpython-313.pyc,, +transformers/models/openai/__pycache__/modeling_tf_openai.cpython-313.pyc,, +transformers/models/openai/__pycache__/tokenization_openai.cpython-313.pyc,, +transformers/models/openai/__pycache__/tokenization_openai_fast.cpython-313.pyc,, 
+transformers/models/openai/configuration_openai.py,sha256=ERFfcrsaGEuG-8WnuBDfYyHR7uc5ihEr9JfItBMGZm0,7109 +transformers/models/openai/modeling_openai.py,sha256=w-NnaXEUNT03W7lvl1_EFj7pyHSVLkZujxftr94VnHg,37721 +transformers/models/openai/modeling_tf_openai.py,sha256=mZWr6qc8fCXTR6y6GXLc_E_FgHn5-CB6dzMOhGXbfVM,40857 +transformers/models/openai/tokenization_openai.py,sha256=MhxS6G-hQLHLdmarGxOPBpvMEbNLHuFRZehOjzg-o90,15159 +transformers/models/openai/tokenization_openai_fast.py,sha256=qBalVcRbqq9AZAnzkFvYTbokp4eU-BvgO3QIWYoqndo,2553 +transformers/models/opt/__init__.py,sha256=Xk3Z-OdrOC4Y5J0KOEIB74Pp4PsfAllBI503NT7yFk8,1059 +transformers/models/opt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/opt/__pycache__/configuration_opt.cpython-313.pyc,, +transformers/models/opt/__pycache__/modeling_flax_opt.cpython-313.pyc,, +transformers/models/opt/__pycache__/modeling_opt.cpython-313.pyc,, +transformers/models/opt/__pycache__/modeling_tf_opt.cpython-313.pyc,, +transformers/models/opt/configuration_opt.py,sha256=nEHN7nBCghjCfcU_vueoTL5TfCMc6JUE6cUH6knhnxM,6694 +transformers/models/opt/modeling_flax_opt.py,sha256=a1OCINHVTj-osjuJUxfYZgTS-1j7r6EPT-TgAD9lP74,31631 +transformers/models/opt/modeling_opt.py,sha256=uCv1_ZVaFaWLz2ip711WZMiB-tZcf71rvJXk2OwXPlY,48327 +transformers/models/opt/modeling_tf_opt.py,sha256=gZ8xHAIaZDYmsvHbxNI66ITB0QIaYoYdbLF7uNVuTEg,49360 +transformers/models/ovis2/__init__.py,sha256=df6-Fy8DbIIuQ9a29Ko9XPCSZm0Yh8hNEnTLR4NUgj0,1137 +transformers/models/ovis2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/ovis2/__pycache__/configuration_ovis2.cpython-313.pyc,, +transformers/models/ovis2/__pycache__/image_processing_ovis2.cpython-313.pyc,, +transformers/models/ovis2/__pycache__/image_processing_ovis2_fast.cpython-313.pyc,, +transformers/models/ovis2/__pycache__/modeling_ovis2.cpython-313.pyc,, +transformers/models/ovis2/__pycache__/modular_ovis2.cpython-313.pyc,, +transformers/models/ovis2/__pycache__/processing_ovis2.cpython-313.pyc,, +transformers/models/ovis2/configuration_ovis2.py,sha256=TPLM3X-fnZ5IYjF8vyZ-bxtF9ob8Wu5QvrxX4pR7eWg,8187 +transformers/models/ovis2/image_processing_ovis2.py,sha256=3mY4y7qwfa-gsnQK9SOO3Iy-IyAqTMSpboFSQ7KgBP4,27859 +transformers/models/ovis2/image_processing_ovis2_fast.py,sha256=e_FV-wBDDcOAe9j-oAc_fuw_K05y0S5M48SbT1ZR9nc,10837 +transformers/models/ovis2/modeling_ovis2.py,sha256=UltZ0Poulsn9LkhgeTsMsp3N7JMKJsCaRRtgEVg4_Vw,34467 +transformers/models/ovis2/modular_ovis2.py,sha256=wgtKNriRoUz-rzByYPH62ysnlXrGTJv12RtYFlXPjC4,17240 +transformers/models/ovis2/processing_ovis2.py,sha256=r_b1_6K-vQ4tj1-ziX1luv4A_IEd1pjzS0jKN9h3ltQ,7972 +transformers/models/owlv2/__init__.py,sha256=vHnQwYJ0hEc12ofOV3b1k9fHscBgfQEXcLLgo4-H3GU,1116 +transformers/models/owlv2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/owlv2/__pycache__/configuration_owlv2.cpython-313.pyc,, +transformers/models/owlv2/__pycache__/image_processing_owlv2.cpython-313.pyc,, +transformers/models/owlv2/__pycache__/image_processing_owlv2_fast.cpython-313.pyc,, +transformers/models/owlv2/__pycache__/modeling_owlv2.cpython-313.pyc,, +transformers/models/owlv2/__pycache__/modular_owlv2.cpython-313.pyc,, +transformers/models/owlv2/__pycache__/processing_owlv2.cpython-313.pyc,, +transformers/models/owlv2/configuration_owlv2.py,sha256=18Krr0RoZ25eU0eCSPegHXXKAoA64zw-bknsCeBkPTs,13134 +transformers/models/owlv2/image_processing_owlv2.py,sha256=hPbsaQX5Grzdj9il2MpCUZY0ky0nFVed4Ff1jifaIlI,28051 
+transformers/models/owlv2/image_processing_owlv2_fast.py,sha256=DnSdjtmXQr_v-wQT5vexBEg8Sumv2qvSjoMIutkyz5Y,18089 +transformers/models/owlv2/modeling_owlv2.py,sha256=GZdyChUvGMt1PU_uGUKpOEofRD9ZxkdkQgtNoKCh8ec,77981 +transformers/models/owlv2/modular_owlv2.py,sha256=jXNLUwp8xViq1yy_r1xFMSghhv99MKnaH1Zie4o_4VA,8725 +transformers/models/owlv2/processing_owlv2.py,sha256=akRzrulhauF5WtoA0nhTufMmIENKjjkqtCqSGboKS0Q,14533 +transformers/models/owlvit/__init__.py,sha256=Nhrrja_j2RZtj-rQS6TDJ8upQqnMptnFukq49QAkito,1166 +transformers/models/owlvit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/owlvit/__pycache__/configuration_owlvit.cpython-313.pyc,, +transformers/models/owlvit/__pycache__/feature_extraction_owlvit.cpython-313.pyc,, +transformers/models/owlvit/__pycache__/image_processing_owlvit.cpython-313.pyc,, +transformers/models/owlvit/__pycache__/image_processing_owlvit_fast.cpython-313.pyc,, +transformers/models/owlvit/__pycache__/modeling_owlvit.cpython-313.pyc,, +transformers/models/owlvit/__pycache__/processing_owlvit.cpython-313.pyc,, +transformers/models/owlvit/configuration_owlvit.py,sha256=KxKdzp6xBZOAEBDSGu_LRGYBKhjEg7tU8dfqLQXv6wo,14435 +transformers/models/owlvit/feature_extraction_owlvit.py,sha256=49Ic56gmQQtE_WEmzzyE9bVBdS5RMkG3vOK1cBcjc5g,1300 +transformers/models/owlvit/image_processing_owlvit.py,sha256=cuE9jtFrwcGN_3mkd-0ZguU-oU7wVFDVeo1I0xTAVAE,29481 +transformers/models/owlvit/image_processing_owlvit_fast.py,sha256=bUbcbutdaVZWbQ1Taz5rJX40PR401uSSy12JGqhm948,10519 +transformers/models/owlvit/modeling_owlvit.py,sha256=8Dp2cnnWqFjiiM2JGwYgf9XABORJ2RPdG0KhypB95VU,73411 +transformers/models/owlvit/processing_owlvit.py,sha256=LcKsqDVAPZobncWWhJAJAjjAMMfaUmoz7CbMO0FrGVA,15279 +transformers/models/paligemma/__init__.py,sha256=nKnTTLC8XYlI7uYfS8h-D4vz3gFhknkNeDlZIwZlZ9w,1039 +transformers/models/paligemma/__pycache__/__init__.cpython-313.pyc,, +transformers/models/paligemma/__pycache__/configuration_paligemma.cpython-313.pyc,, +transformers/models/paligemma/__pycache__/modeling_paligemma.cpython-313.pyc,, +transformers/models/paligemma/__pycache__/processing_paligemma.cpython-313.pyc,, +transformers/models/paligemma/configuration_paligemma.py,sha256=BTK5bsUZwUovhsGqWHIzYidzy-XyBEtDEROPh3IZJ9w,5362 +transformers/models/paligemma/modeling_paligemma.py,sha256=ta1GVvFatkYZY6o-N-N7oJxD8aVhA4WTxC-awk18DNk,27366 +transformers/models/paligemma/processing_paligemma.py,sha256=nDroihOWd88xWiNPNoJHpumP-KzHhTy0MwfCf7VRgEc,15172 +transformers/models/parakeet/__init__.py,sha256=hUBWAelpUTDzNsO6U5c1w2mts5ePvfNbov_OzhNh8X4,1090 +transformers/models/parakeet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/parakeet/__pycache__/configuration_parakeet.cpython-313.pyc,, +transformers/models/parakeet/__pycache__/feature_extraction_parakeet.cpython-313.pyc,, +transformers/models/parakeet/__pycache__/modeling_parakeet.cpython-313.pyc,, +transformers/models/parakeet/__pycache__/modular_parakeet.cpython-313.pyc,, +transformers/models/parakeet/__pycache__/processing_parakeet.cpython-313.pyc,, +transformers/models/parakeet/__pycache__/tokenization_parakeet_fast.cpython-313.pyc,, +transformers/models/parakeet/configuration_parakeet.py,sha256=iEzIGIAuJs33gfxpPLHuI-tWEZEd6tmxowuFzMXsxWs,10195 +transformers/models/parakeet/feature_extraction_parakeet.py,sha256=x6dbut6ur6acWsyvF7EMzh5qKwxmB1FcMkkYavVMDHU,13147 +transformers/models/parakeet/modeling_parakeet.py,sha256=0bE5o3gQgBeh1bJF1XyoantIdrZjMZtPoMODf-Y_urU,32330 
+transformers/models/parakeet/modular_parakeet.py,sha256=cah4ax8JGGtR3yUsGJCDPLd4mket4XVdD9x6h_5yi_k,26651 +transformers/models/parakeet/processing_parakeet.py,sha256=Av8YnWOgSzRM7ENuo0VU_gBkSO9roQB0PR2q7q_f1n4,3221 +transformers/models/parakeet/tokenization_parakeet_fast.py,sha256=5oWBkuJpIQKxYKaea1bw_ju8Wrq0bGz2wdVvQi5IaRs,1946 +transformers/models/patchtsmixer/__init__.py,sha256=deFjF_Tu67XcAcNHaq1PXO77N4kVW9wG80SnXBaeagE,1005 +transformers/models/patchtsmixer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/patchtsmixer/__pycache__/configuration_patchtsmixer.cpython-313.pyc,, +transformers/models/patchtsmixer/__pycache__/modeling_patchtsmixer.cpython-313.pyc,, +transformers/models/patchtsmixer/configuration_patchtsmixer.py,sha256=h1w-YRD_Q9AgQUKBRvzxi2JBEW35NbDap8xkdui-c3U,12580 +transformers/models/patchtsmixer/modeling_patchtsmixer.py,sha256=Tb4GZWfKaXEwNt12Rr6TNZp4nQsBXq_lzmUfRR4EyS8,85134 +transformers/models/patchtst/__init__.py,sha256=lrpuBvP25Yq6HZOCyS4yWVYZ47qWzK--rqC0AOIGGPE,997 +transformers/models/patchtst/__pycache__/__init__.cpython-313.pyc,, +transformers/models/patchtst/__pycache__/configuration_patchtst.cpython-313.pyc,, +transformers/models/patchtst/__pycache__/modeling_patchtst.cpython-313.pyc,, +transformers/models/patchtst/configuration_patchtst.py,sha256=FdiHfYFiHvo7kIuOV_zSGPHZ2Q-QYbPEB1ZkqLOc5qE,12309 +transformers/models/patchtst/modeling_patchtst.py,sha256=fYVkC0X1QFCMEmNF9yRMp-hYFphSQPjuwgZTQa-4Rh0,84642 +transformers/models/pegasus/__init__.py,sha256=4b7vCYJfIWUPuKrbcBGTG7LtobUdZ5ZjeQhloScTrXs,1160 +transformers/models/pegasus/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pegasus/__pycache__/configuration_pegasus.cpython-313.pyc,, +transformers/models/pegasus/__pycache__/modeling_flax_pegasus.cpython-313.pyc,, +transformers/models/pegasus/__pycache__/modeling_pegasus.cpython-313.pyc,, +transformers/models/pegasus/__pycache__/modeling_tf_pegasus.cpython-313.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus.cpython-313.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus_fast.cpython-313.pyc,, +transformers/models/pegasus/configuration_pegasus.py,sha256=78-WMVFtUhigUXXJ4PabYJA8S3VpfQW9-2NcM5t8Hlo,7517 +transformers/models/pegasus/modeling_flax_pegasus.py,sha256=Rkc0964DKuqgYPMmzKNcZx4_g3hOqV8kD5udpqy-wRE,66161 +transformers/models/pegasus/modeling_pegasus.py,sha256=TpgN32E98jPHuDqQUd-JTOdcYRjiiuPSGwN2-aVmi0w,77934 +transformers/models/pegasus/modeling_tf_pegasus.py,sha256=_WR-vfHxUBGzBUJVEsw7pajamDJQAeZZYsMvEU_HV_4,74149 +transformers/models/pegasus/tokenization_pegasus.py,sha256=Mlf8ZdllYQGJMktD0ci2aD46NczeKcuw-NZNgt9bkgw,13231 +transformers/models/pegasus/tokenization_pegasus_fast.py,sha256=c6xFEwXqtpScjYMasosqrAlwJMsdJCC_Sjp_BYniK7s,9833 +transformers/models/pegasus_x/__init__.py,sha256=qSLaqKRA1upZOobapHW5MjSZvIEzf-ij-ZmY1VGzqaE,999 +transformers/models/pegasus_x/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pegasus_x/__pycache__/configuration_pegasus_x.cpython-313.pyc,, +transformers/models/pegasus_x/__pycache__/modeling_pegasus_x.cpython-313.pyc,, +transformers/models/pegasus_x/configuration_pegasus_x.py,sha256=RasKHKP1N0ZEvsl81J2Y3jhNAJo0zplnAKI2ZqYJdv4,8132 +transformers/models/pegasus_x/modeling_pegasus_x.py,sha256=30KSzN9JuE7Sbpi6uvBgd8lWGCvV4v1UWf351GcwrjE,79137 +transformers/models/perceiver/__init__.py,sha256=LKUlUJfZGRC1jU6TNkG-4kNy8aIeHIqvAnwLI_33AVY,1186 +transformers/models/perceiver/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/perceiver/__pycache__/configuration_perceiver.cpython-313.pyc,, +transformers/models/perceiver/__pycache__/feature_extraction_perceiver.cpython-313.pyc,, +transformers/models/perceiver/__pycache__/image_processing_perceiver.cpython-313.pyc,, +transformers/models/perceiver/__pycache__/image_processing_perceiver_fast.cpython-313.pyc,, +transformers/models/perceiver/__pycache__/modeling_perceiver.cpython-313.pyc,, +transformers/models/perceiver/__pycache__/tokenization_perceiver.cpython-313.pyc,, +transformers/models/perceiver/configuration_perceiver.py,sha256=0h5NCC6iJiA_cOv9gvcpxNgiFc0r25Rvv7PIHh1jp6Q,12236 +transformers/models/perceiver/feature_extraction_perceiver.py,sha256=JK3Y4won5macefR13tx-zdUF_TaHE4RrJllJyYzIhWU,1324 +transformers/models/perceiver/image_processing_perceiver.py,sha256=3Ise4hPNUhsVyIXIUGqKz0KZ1vG1melYTP2AovLJ-Po,17591 +transformers/models/perceiver/image_processing_perceiver_fast.py,sha256=ePz69sn2_CkJmkDJlAnyg9-2ItD_gn3WY4Bc5qPMiPA,5045 +transformers/models/perceiver/modeling_perceiver.py,sha256=8-u-KZuhu7LD5uFA4vaOXHFr2fz5SWcAMhu0Q3pz2K8,137484 +transformers/models/perceiver/tokenization_perceiver.py,sha256=9cCQTtfKJUzwWoUQ3YEBdsh9RrJgsL4N2kxA8fPQuqc,8034 +transformers/models/perception_lm/__init__.py,sha256=RVEjQWlzsHJm3D-3JXLThzBjJLSCMpvFslQMIkvRKiA,1106 +transformers/models/perception_lm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/perception_lm/__pycache__/configuration_perception_lm.cpython-313.pyc,, +transformers/models/perception_lm/__pycache__/image_processing_perception_lm_fast.cpython-313.pyc,, +transformers/models/perception_lm/__pycache__/modeling_perception_lm.cpython-313.pyc,, +transformers/models/perception_lm/__pycache__/modular_perception_lm.cpython-313.pyc,, +transformers/models/perception_lm/__pycache__/processing_perception_lm.cpython-313.pyc,, +transformers/models/perception_lm/__pycache__/video_processing_perception_lm.cpython-313.pyc,, +transformers/models/perception_lm/configuration_perception_lm.py,sha256=ZrA3gpUzZJf3POf1aLhGMU9mVwC5IreP5wOrvvVwqCQ,3888 +transformers/models/perception_lm/image_processing_perception_lm_fast.py,sha256=zw3SaD_dQrx48y98RONJlzInByn1YAADByqIfBPMtU4,13971 +transformers/models/perception_lm/modeling_perception_lm.py,sha256=7dOFsqbr-JdLqtcZoyse8WCyYYM3Kd4ZQXJH-zZ9taM,21281 +transformers/models/perception_lm/modular_perception_lm.py,sha256=ajMCR1lvIHeC58c_nhhhyAbk9-SNvvruFJNIqzqEaXs,18890 +transformers/models/perception_lm/processing_perception_lm.py,sha256=Nx9FdDxaa4RaaOj5EYO43qM9RHOr-zTZ0h8tVPYIeuQ,11407 +transformers/models/perception_lm/video_processing_perception_lm.py,sha256=TOkzfeCdZRTUIQlqojwEOMNkZKpDj25PmNB1ocUJzZw,1575 +transformers/models/persimmon/__init__.py,sha256=T1WqyE78N2TO74u9a9QdRIGaMowYqP6vWv8KhPojkLg,999 +transformers/models/persimmon/__pycache__/__init__.cpython-313.pyc,, +transformers/models/persimmon/__pycache__/configuration_persimmon.cpython-313.pyc,, +transformers/models/persimmon/__pycache__/modeling_persimmon.cpython-313.pyc,, +transformers/models/persimmon/configuration_persimmon.py,sha256=6KV-r-B5sqC9d2Ybzind_YDxqnM0lITfbH5zPqGg6K4,9149 +transformers/models/persimmon/modeling_persimmon.py,sha256=BKE9-S7HM4P8Y3zxSNrWj7PkkG6Jm7mibpoEf6X7lKI,33528 +transformers/models/phi/__init__.py,sha256=4DUgmUqGKcGXxzTrxUVGcacZ43uv3SzXsOV_Ke6oeGg,1006 +transformers/models/phi/__pycache__/__init__.cpython-313.pyc,, +transformers/models/phi/__pycache__/configuration_phi.cpython-313.pyc,, +transformers/models/phi/__pycache__/modeling_phi.cpython-313.pyc,, 
+transformers/models/phi/__pycache__/modular_phi.cpython-313.pyc,, +transformers/models/phi/configuration_phi.py,sha256=xYk2xva2KXG5k_Dk8ND3JObDjcfPuklUdSkNIT0DYJ8,11172 +transformers/models/phi/modeling_phi.py,sha256=721JyxmHwkNwqRsq7wGd_pdvbOcduwBsFDDpC4oD53w,22133 +transformers/models/phi/modular_phi.py,sha256=v9h_mx4Z4ZIF_czVLBUGGgnfwPL3piLR5jGqL97OJa4,11618 +transformers/models/phi3/__init__.py,sha256=dxyO-jIh0yB6t2Dzs173aRrEnTceVMIYIkg6JxIeyWs,989 +transformers/models/phi3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/phi3/__pycache__/configuration_phi3.cpython-313.pyc,, +transformers/models/phi3/__pycache__/modeling_phi3.cpython-313.pyc,, +transformers/models/phi3/__pycache__/modular_phi3.cpython-313.pyc,, +transformers/models/phi3/configuration_phi3.py,sha256=Yamd8nkyPdC_AtCPA9w0tjSEjaBmxgGKvxA0n0FIIAc,11573 +transformers/models/phi3/modeling_phi3.py,sha256=KyQrfUWpaHR_DtvHOlWigkZz0NwOBzmBsRoyzXPzXA8,23280 +transformers/models/phi3/modular_phi3.py,sha256=mWO0wTSb-MHy6xVvECvFOO-ia331UjLV6BhnK-7fZXE,11132 +transformers/models/phi4_multimodal/__init__.py,sha256=EqoKUvkh9f14qg07g-4MLclztlyiyLfN2qqEp3RGp2w,1170 +transformers/models/phi4_multimodal/__pycache__/__init__.cpython-313.pyc,, +transformers/models/phi4_multimodal/__pycache__/configuration_phi4_multimodal.cpython-313.pyc,, +transformers/models/phi4_multimodal/__pycache__/feature_extraction_phi4_multimodal.cpython-313.pyc,, +transformers/models/phi4_multimodal/__pycache__/image_processing_phi4_multimodal_fast.cpython-313.pyc,, +transformers/models/phi4_multimodal/__pycache__/modeling_phi4_multimodal.cpython-313.pyc,, +transformers/models/phi4_multimodal/__pycache__/modular_phi4_multimodal.cpython-313.pyc,, +transformers/models/phi4_multimodal/__pycache__/processing_phi4_multimodal.cpython-313.pyc,, +transformers/models/phi4_multimodal/configuration_phi4_multimodal.py,sha256=SaLExfxMFf8BaAQCtrmmqbCwpx_mQuEAwhMuKJ5Wcyo,24363 +transformers/models/phi4_multimodal/feature_extraction_phi4_multimodal.py,sha256=T1R76IxQCkMh-_jeP8zrBecywvbDnG5hNt_VJAKs2nk,13413 +transformers/models/phi4_multimodal/image_processing_phi4_multimodal_fast.py,sha256=_E8MwzhMegAXyUxzVhV8HESmNA8RCIUA3GSpEzJLXow,10497 +transformers/models/phi4_multimodal/modeling_phi4_multimodal.py,sha256=Q5jhaccvfZZKd0vMyiS0Jk_29C2tR8qjnZUidSWdyCM,79501 +transformers/models/phi4_multimodal/modular_phi4_multimodal.py,sha256=W5j3Y5ae5rcqhuJkHjaFYd8IcXKk4mA232Czu0rn4ao,77182 +transformers/models/phi4_multimodal/processing_phi4_multimodal.py,sha256=6N61sgrHtffBZSLsWsY6fYBgG0EYIfE4cwbuuKElsO4,8107 +transformers/models/phimoe/__init__.py,sha256=wGasPysu0EH_q0QGaZmXqQL57GxfZn8NTsvB2I6U2ro,1013 +transformers/models/phimoe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/phimoe/__pycache__/configuration_phimoe.cpython-313.pyc,, +transformers/models/phimoe/__pycache__/modeling_phimoe.cpython-313.pyc,, +transformers/models/phimoe/configuration_phimoe.py,sha256=bmX2NBq89oFKQ5PnFmkcknw5dT6A9w2D9psS8jubltw,10238 +transformers/models/phimoe/modeling_phimoe.py,sha256=zh6xRbDkvqbFwpWWh1PCLlbs_isWPcH3KDzlvN6CYw8,59885 +transformers/models/phobert/__init__.py,sha256=mau-2HIOzSk8qGIhxivVBPPYTx3hhdgoKPtnptDF38M,958 +transformers/models/phobert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/phobert/__pycache__/tokenization_phobert.cpython-313.pyc,, +transformers/models/phobert/tokenization_phobert.py,sha256=0ItqQt-YiRb44Wqyp6e59UQOc_wSmErRFAziorm_w6o,13111 +transformers/models/pix2struct/__init__.py,sha256=ivncogrVjZZ6ag6FYHJ0XqyCMJYbsCYlh5boqxe09Yo,1089 
+transformers/models/pix2struct/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pix2struct/__pycache__/configuration_pix2struct.cpython-313.pyc,, +transformers/models/pix2struct/__pycache__/image_processing_pix2struct.cpython-313.pyc,, +transformers/models/pix2struct/__pycache__/modeling_pix2struct.cpython-313.pyc,, +transformers/models/pix2struct/__pycache__/processing_pix2struct.cpython-313.pyc,, +transformers/models/pix2struct/configuration_pix2struct.py,sha256=eHg19KzSW5bh2dVTgMH7vhZPZu2yg6iEpy8DNO6bk8U,15370 +transformers/models/pix2struct/image_processing_pix2struct.py,sha256=WZ9JONvTc8PYZMvPJ2JYY7b3OFUws47VTwFi4iwYpHE,19799 +transformers/models/pix2struct/modeling_pix2struct.py,sha256=P_bq-C7wvfFlx7pWIw4ChVPctobvrzXJgIcClInfwwQ,72168 +transformers/models/pix2struct/processing_pix2struct.py,sha256=d_oNjxTHgeu06zbPe5ZjHUvcignufU4Wql8XBhvvgwk,5677 +transformers/models/pixtral/__init__.py,sha256=WKCxuWpCeTYsYSaTH1XnUcGkIHEx5BIIXwwwqG_E83s,1126 +transformers/models/pixtral/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pixtral/__pycache__/configuration_pixtral.cpython-313.pyc,, +transformers/models/pixtral/__pycache__/image_processing_pixtral.cpython-313.pyc,, +transformers/models/pixtral/__pycache__/image_processing_pixtral_fast.cpython-313.pyc,, +transformers/models/pixtral/__pycache__/modeling_pixtral.cpython-313.pyc,, +transformers/models/pixtral/__pycache__/processing_pixtral.cpython-313.pyc,, +transformers/models/pixtral/configuration_pixtral.py,sha256=86cY74VW7J8XqU1JbvpxLqOXnnzoPh7I_9zja8j3Wng,4237 +transformers/models/pixtral/image_processing_pixtral.py,sha256=LBgaL_2tIZxryRkONb8iS9c5Fl6R1DEgTgPJkPDkF10,22081 +transformers/models/pixtral/image_processing_pixtral_fast.py,sha256=ypfQ2Bi-y3wkCovLvXlGL5hhKSPXe66eCCTcnzzShIU,7851 +transformers/models/pixtral/modeling_pixtral.py,sha256=QMwr3lbVL0SE3MdU3sj04S49LPojvbj0-5zXFx1TQFE,20982 +transformers/models/pixtral/processing_pixtral.py,sha256=dX0I3wGEgrD7gXndAK6Qa1OevpD_ppcBIKfxDZmhfsA,11896 +transformers/models/plbart/__init__.py,sha256=jmP857QTG7jGfr9n0qK3TB_1-hdVDD1ajtJvP6C7FIw,1032 +transformers/models/plbart/__pycache__/__init__.cpython-313.pyc,, +transformers/models/plbart/__pycache__/configuration_plbart.cpython-313.pyc,, +transformers/models/plbart/__pycache__/modeling_plbart.cpython-313.pyc,, +transformers/models/plbart/__pycache__/modular_plbart.cpython-313.pyc,, +transformers/models/plbart/__pycache__/tokenization_plbart.cpython-313.pyc,, +transformers/models/plbart/configuration_plbart.py,sha256=N3T4lbCCGP3nYNIHYah1yMvDy1FiJqgpdHIAKJFfcTQ,8621 +transformers/models/plbart/modeling_plbart.py,sha256=RUbyPW9QC03iF0lyIjd4ByIU6IScd9GLPl2U4e2ydYE,80652 +transformers/models/plbart/modular_plbart.py,sha256=_7kGkI9nakn2y-itDsNamyT8qtjNG_2bnbNb4lN0LkY,31220 +transformers/models/plbart/tokenization_plbart.py,sha256=ytGwNtPfgHXoKdK85Qm1fz68tBU-qbrYwdMLd96d2Xs,18910 +transformers/models/poolformer/__init__.py,sha256=FgSXHIGeF8uz-Ye67HSRQefjounknzaqm0ZCOiMj4zo,1149 +transformers/models/poolformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/poolformer/__pycache__/configuration_poolformer.cpython-313.pyc,, +transformers/models/poolformer/__pycache__/feature_extraction_poolformer.cpython-313.pyc,, +transformers/models/poolformer/__pycache__/image_processing_poolformer.cpython-313.pyc,, +transformers/models/poolformer/__pycache__/image_processing_poolformer_fast.cpython-313.pyc,, +transformers/models/poolformer/__pycache__/modeling_poolformer.cpython-313.pyc,, 
+transformers/models/poolformer/configuration_poolformer.py,sha256=mU4fQSyfdSwP-vB3UIAkNuYI6wyqhxu2R3SOupiY2pc,5641 +transformers/models/poolformer/feature_extraction_poolformer.py,sha256=8NBHTCScDnuQAjwNVL1Mxs4xllp9FnJCSonL_ceF_lg,1332 +transformers/models/poolformer/image_processing_poolformer.py,sha256=74zcQtTYc9H2wxlDNEpDyAdyMVEXRrgYZ87keKgLbJ4,17942 +transformers/models/poolformer/image_processing_poolformer_fast.py,sha256=g802FsnPFcYBaiNRx0WTfN98vi-HDVwBLWuIfOvwGdA,10315 +transformers/models/poolformer/modeling_poolformer.py,sha256=x8Yy0-jihy_4u9M0PWWBheEkNbd6E3OeSzOtfbwsV9o,14839 +transformers/models/pop2piano/__init__.py,sha256=I2PPcFi-p0X5py7dLqobymv3E9g-mUv1QRn0luyPlIk,999 +transformers/models/pop2piano/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pop2piano/__pycache__/configuration_pop2piano.cpython-313.pyc,, +transformers/models/pop2piano/__pycache__/feature_extraction_pop2piano.cpython-313.pyc,, +transformers/models/pop2piano/__pycache__/modeling_pop2piano.cpython-313.pyc,, +transformers/models/pop2piano/__pycache__/processing_pop2piano.cpython-313.pyc,, +transformers/models/pop2piano/__pycache__/tokenization_pop2piano.cpython-313.pyc,, +transformers/models/pop2piano/configuration_pop2piano.py,sha256=dd4YFUQuLykahr1WPmw096ZXZEXpRMkpIrpmeP5JRN0,5951 +transformers/models/pop2piano/feature_extraction_pop2piano.py,sha256=DQb7Y7mxkvYfwDbZQ-CQYzdOQ9mX17D-lhcsMMW4538,19974 +transformers/models/pop2piano/modeling_pop2piano.py,sha256=jg4y0buKJWSS-8x9jjEyvfPTSfwGpDxdY_oVqP8LeBg,63188 +transformers/models/pop2piano/processing_pop2piano.py,sha256=Fdvmle3D5Clhgx6RMjXE2RknTvcNahWWIUo8PltF50c,5399 +transformers/models/pop2piano/tokenization_pop2piano.py,sha256=ntxNcv0GFLzDpy6eHDCqDsjD8GXKiQUxmgXZiE3iIoE,32803 +transformers/models/prompt_depth_anything/__init__.py,sha256=KjyIVKAGtL4B0RRW4s5WyyFNmvimW_Cv36LkiyuPbGA,1333 +transformers/models/prompt_depth_anything/__pycache__/__init__.cpython-313.pyc,, +transformers/models/prompt_depth_anything/__pycache__/configuration_prompt_depth_anything.cpython-313.pyc,, +transformers/models/prompt_depth_anything/__pycache__/image_processing_prompt_depth_anything.cpython-313.pyc,, +transformers/models/prompt_depth_anything/__pycache__/image_processing_prompt_depth_anything_fast.cpython-313.pyc,, +transformers/models/prompt_depth_anything/__pycache__/modeling_prompt_depth_anything.cpython-313.pyc,, +transformers/models/prompt_depth_anything/__pycache__/modular_prompt_depth_anything.cpython-313.pyc,, +transformers/models/prompt_depth_anything/configuration_prompt_depth_anything.py,sha256=vL-HwgUR7moy5eBRTH0t1qPDduSQ40J9F6i9rpNZG-Y,9062 +transformers/models/prompt_depth_anything/image_processing_prompt_depth_anything.py,sha256=HdE7-txaofl5a9NIhsoH76oSCF2wLJ7Pay5_a9VN15k,24748 +transformers/models/prompt_depth_anything/image_processing_prompt_depth_anything_fast.py,sha256=uCI3WpEHXNBv_rdY2XOTKU48neu7ZFNUwj8TX1CJ4JU,14136 +transformers/models/prompt_depth_anything/modeling_prompt_depth_anything.py,sha256=kpI2MyI32F-vI3I412QykOXfNIJHBaqpVJ13uubaaOg,20407 +transformers/models/prompt_depth_anything/modular_prompt_depth_anything.py,sha256=wP11VynfmVInYHvH4GeJDd1ELl7C7nj8evmivgntkDQ,13755 +transformers/models/prophetnet/__init__.py,sha256=TYI21JDlj449kTgKAOtUBpuxVv5L_I70CDjofSZ627M,1044 +transformers/models/prophetnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/prophetnet/__pycache__/configuration_prophetnet.cpython-313.pyc,, +transformers/models/prophetnet/__pycache__/modeling_prophetnet.cpython-313.pyc,, 
+transformers/models/prophetnet/__pycache__/tokenization_prophetnet.cpython-313.pyc,, +transformers/models/prophetnet/configuration_prophetnet.py,sha256=XPcB7itQbCqMzONUd3-4Ag6wtBHC4f3-jWb6OUTyxoQ,8893 +transformers/models/prophetnet/modeling_prophetnet.py,sha256=uwcgn6o4uepCjsGgEbEm28zkzbQvcvD4bBv-fk8QvGo,97193 +transformers/models/prophetnet/tokenization_prophetnet.py,sha256=JsP4EwckyDeORtcAsVlrNlfLXo_YriRbiZwLcKEzFDI,20154 +transformers/models/pvt/__init__.py,sha256=-4ajQRrz2cTp2czAd6D23yxShatfUpHzZrHyyLRsku0,1072 +transformers/models/pvt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pvt/__pycache__/configuration_pvt.cpython-313.pyc,, +transformers/models/pvt/__pycache__/image_processing_pvt.cpython-313.pyc,, +transformers/models/pvt/__pycache__/image_processing_pvt_fast.cpython-313.pyc,, +transformers/models/pvt/__pycache__/modeling_pvt.cpython-313.pyc,, +transformers/models/pvt/configuration_pvt.py,sha256=NivJRKXgMQ-F4SOqf7Z3nFNWxJKdsV6iqJ2YdVvrtj0,6983 +transformers/models/pvt/image_processing_pvt.py,sha256=-A6Q9_jrqBhiVIi3Mh3bNJXOxXLzOXhI_qtzVqL5zVE,13882 +transformers/models/pvt/image_processing_pvt_fast.py,sha256=C2hhtB4aUqbLdVawDaeubuef9fSkC-oNT0c4WJK8Ja0,1341 +transformers/models/pvt/modeling_pvt.py,sha256=ir6AztE-05jFjPlrJIhkCBObsxpz7OJ-s92Y2ymBfaw,24686 +transformers/models/pvt_v2/__init__.py,sha256=LkmqeLd7cZGKTFX_2d9_jU0sj_bDlML042kr_vMJTLw,993 +transformers/models/pvt_v2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/pvt_v2/__pycache__/configuration_pvt_v2.cpython-313.pyc,, +transformers/models/pvt_v2/__pycache__/modeling_pvt_v2.cpython-313.pyc,, +transformers/models/pvt_v2/configuration_pvt_v2.py,sha256=7LMMRQUgb_xYvWFkGtLxA6k_a10cBFUtSx8vejJup38,7978 +transformers/models/pvt_v2/modeling_pvt_v2.py,sha256=bTon7gNl2FolGIG5X_N6CK18vldIHUFd3Sw72CBO44s,25365 +transformers/models/qwen2/__init__.py,sha256=e49oEzErXujE0UVl_q_agf5XHzHES4vV2kLwmqdk2kg,1095 +transformers/models/qwen2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen2/__pycache__/configuration_qwen2.cpython-313.pyc,, +transformers/models/qwen2/__pycache__/modeling_qwen2.cpython-313.pyc,, +transformers/models/qwen2/__pycache__/modular_qwen2.cpython-313.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2.cpython-313.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2_fast.cpython-313.pyc,, +transformers/models/qwen2/configuration_qwen2.py,sha256=K50KA_7u9zSuRR7uOBrRXe78AqtLi5V9h4A_tYlOm9M,11400 +transformers/models/qwen2/modeling_qwen2.py,sha256=CHQqfIKocTvCL2MvAcl7fso-8OlHGA59bJvuxIl8T_g,21623 +transformers/models/qwen2/modular_qwen2.py,sha256=UXCwF9Ws9vMK8cqme4BKQ5zEDhdVLruMi3n26VJ3cdE,9500 +transformers/models/qwen2/tokenization_qwen2.py,sha256=I_BWl_yvJv5eMoq69STwEFEKK59LouLtydygpAFaCaI,13935 +transformers/models/qwen2/tokenization_qwen2_fast.py,sha256=ECWjuGUmKDvYakR_D-LZkdCXdbMtP9zCM8nkR7BhEEk,5210 +transformers/models/qwen2_5_omni/__init__.py,sha256=YEDAlOoWmhkZ4L6lxmlVqVhe5A0P6aVSJNSziEFSN4E,1071 +transformers/models/qwen2_5_omni/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen2_5_omni/__pycache__/configuration_qwen2_5_omni.cpython-313.pyc,, +transformers/models/qwen2_5_omni/__pycache__/modeling_qwen2_5_omni.cpython-313.pyc,, +transformers/models/qwen2_5_omni/__pycache__/modular_qwen2_5_omni.cpython-313.pyc,, +transformers/models/qwen2_5_omni/__pycache__/processing_qwen2_5_omni.cpython-313.pyc,, +transformers/models/qwen2_5_omni/configuration_qwen2_5_omni.py,sha256=h4nJZjnS_Jed3OqGzEVH56tEvkx0JsrK4I36_hkMDsY,52838 
+transformers/models/qwen2_5_omni/modeling_qwen2_5_omni.py,sha256=gwhVB40MCxWaRwQfiUKUn9Gm79OcYhJ8E4fMO4QFj6M,177393 +transformers/models/qwen2_5_omni/modular_qwen2_5_omni.py,sha256=jhG7Cdm3bRkHBOG2pz2gERVj_DN41MJ5z9ShOXEbf1s,191129 +transformers/models/qwen2_5_omni/processing_qwen2_5_omni.py,sha256=CEBbeuYqjcQSqMl1cq2dFi0LldzJ1IlS25c3g_XilFg,17110 +transformers/models/qwen2_5_vl/__init__.py,sha256=8-dsgLIeeE3n90n6F0XOu-tBZ-80Wotz89pjZi5GqjQ,1065 +transformers/models/qwen2_5_vl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen2_5_vl/__pycache__/configuration_qwen2_5_vl.cpython-313.pyc,, +transformers/models/qwen2_5_vl/__pycache__/modeling_qwen2_5_vl.cpython-313.pyc,, +transformers/models/qwen2_5_vl/__pycache__/modular_qwen2_5_vl.cpython-313.pyc,, +transformers/models/qwen2_5_vl/__pycache__/processing_qwen2_5_vl.cpython-313.pyc,, +transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py,sha256=-btdhPGM_bUG2EOctbDweSDkHStNk6OUURPCNZz6SMs,18476 +transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py,sha256=9QUVIzABRfZt4IB2XAa9gIA1DBVpLAltLDjnP-nQMJk,82355 +transformers/models/qwen2_5_vl/modular_qwen2_5_vl.py,sha256=8T5rkMDA9BKurCAWBm5l57ktOWxBWYgM3QSRSWmkDog,49601 +transformers/models/qwen2_5_vl/processing_qwen2_5_vl.py,sha256=aogQe4UyDKnuEVRzFTDbWEugSKEq_x66sve9x3BiGE0,14907 +transformers/models/qwen2_audio/__init__.py,sha256=KaUmP3FK3GdeWvbunzyp1QjBki0USS4E80NlvhaJ3D8,1045 +transformers/models/qwen2_audio/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen2_audio/__pycache__/configuration_qwen2_audio.cpython-313.pyc,, +transformers/models/qwen2_audio/__pycache__/modeling_qwen2_audio.cpython-313.pyc,, +transformers/models/qwen2_audio/__pycache__/processing_qwen2_audio.cpython-313.pyc,, +transformers/models/qwen2_audio/configuration_qwen2_audio.py,sha256=U37sA7O8GvbrlFOwXgBSvqu28Kz2uKJORqjthqH8UnE,8673 +transformers/models/qwen2_audio/modeling_qwen2_audio.py,sha256=-gMWHKbcAT5rILYmNVypl9kkWEiKOVYKRY6waW6poUQ,41729 +transformers/models/qwen2_audio/processing_qwen2_audio.py,sha256=4Q38fvhIEKQLIE435h7c9bGlCAi3d5qh3CU5OTMlwas,11312 +transformers/models/qwen2_moe/__init__.py,sha256=TZM20WtUr1UyV-hDDgq5B-qFT4aUulMpjWwSUNdUs2w,999 +transformers/models/qwen2_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen2_moe/__pycache__/configuration_qwen2_moe.cpython-313.pyc,, +transformers/models/qwen2_moe/__pycache__/modeling_qwen2_moe.cpython-313.pyc,, +transformers/models/qwen2_moe/configuration_qwen2_moe.py,sha256=3BBfpz3Pu3DuaCuTtWaFdY_JK5QrLVWrW-Ri-Ky205I,13228 +transformers/models/qwen2_moe/modeling_qwen2_moe.py,sha256=LGeqEeHF9qDHi0qCanRAqV5W1FbQgauvXvcqsvYetrg,54846 +transformers/models/qwen2_vl/__init__.py,sha256=MtNDD6sEQws-WTLwPxUL5UNd-UyDPrDh8yWzIAsRp-U,1131 +transformers/models/qwen2_vl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen2_vl/__pycache__/configuration_qwen2_vl.cpython-313.pyc,, +transformers/models/qwen2_vl/__pycache__/image_processing_qwen2_vl.cpython-313.pyc,, +transformers/models/qwen2_vl/__pycache__/image_processing_qwen2_vl_fast.cpython-313.pyc,, +transformers/models/qwen2_vl/__pycache__/modeling_qwen2_vl.cpython-313.pyc,, +transformers/models/qwen2_vl/__pycache__/processing_qwen2_vl.cpython-313.pyc,, +transformers/models/qwen2_vl/__pycache__/video_processing_qwen2_vl.cpython-313.pyc,, +transformers/models/qwen2_vl/configuration_qwen2_vl.py,sha256=_CztMUvUbM7wHyWeugM_UFUuSk0K-5mU8UZhwtQky0c,17106 +transformers/models/qwen2_vl/image_processing_qwen2_vl.py,sha256=eCCg_MoQfnVgXgjZt3QoXKKwMW-Fe8AiXHeXlHBez08,26381 
+transformers/models/qwen2_vl/image_processing_qwen2_vl_fast.py,sha256=Cb-psX33w_DGFZvDQAjuUPIdJHLNW65-XCG6HKE6Qjw,12723 +transformers/models/qwen2_vl/modeling_qwen2_vl.py,sha256=8y_bho4BRBTzvKL8yOcWX8nkm1fpRptPOvmiQ2QK4Ps,74168 +transformers/models/qwen2_vl/processing_qwen2_vl.py,sha256=hjoTvQrQDX1ADMe7q0RnSF47kdMoGbYBvMRX35HSeOU,12801 +transformers/models/qwen2_vl/video_processing_qwen2_vl.py,sha256=MwNHWyxwShfI0CVu3jx54_8J-kNXW_kMfeQbcpWHhXk,13813 +transformers/models/qwen3/__init__.py,sha256=5JU8uO9x0AmJ-YjY36MxtbMKT_B38dLJkrnAwLyjcTY,1014 +transformers/models/qwen3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen3/__pycache__/configuration_qwen3.cpython-313.pyc,, +transformers/models/qwen3/__pycache__/modeling_qwen3.cpython-313.pyc,, +transformers/models/qwen3/__pycache__/modular_qwen3.cpython-313.pyc,, +transformers/models/qwen3/configuration_qwen3.py,sha256=J4Y-lxj9vImfLQ5WdiHk09NtjcUAwdVLSdukJC0I0r0,11810 +transformers/models/qwen3/modeling_qwen3.py,sha256=2W7upMAAq6pwp8AndDfQhDtbLQBasjPg_8r4YKNX8ao,23069 +transformers/models/qwen3/modular_qwen3.py,sha256=WA_tIiC8UOyrxynwkbPQl-a8zRTd6bPbVkD5jK9irVs,6199 +transformers/models/qwen3_moe/__init__.py,sha256=q5WfIniJecmOju3Lhy277H3Puu7viwc9vUhUWen3UZY,999 +transformers/models/qwen3_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen3_moe/__pycache__/configuration_qwen3_moe.cpython-313.pyc,, +transformers/models/qwen3_moe/__pycache__/modeling_qwen3_moe.cpython-313.pyc,, +transformers/models/qwen3_moe/__pycache__/modular_qwen3_moe.cpython-313.pyc,, +transformers/models/qwen3_moe/configuration_qwen3_moe.py,sha256=DQzCSiwo1SfPrEUr_wJV14N8qM7iVAIuhGDIt7BZPHw,12862 +transformers/models/qwen3_moe/modeling_qwen3_moe.py,sha256=3w3ZQrmjk6mZ0hpgZP1hyb4Ca0mkfRPS9ZKjJYBDz6c,32146 +transformers/models/qwen3_moe/modular_qwen3_moe.py,sha256=tSjWubuVGbJdWoYbvrTBV2Wh2agahUdIDXC0FiTmiqY,12091 +transformers/models/qwen3_next/__init__.py,sha256=PuPvF5xcEfBxUKjqiZaWCiHeDeKuTmNdfuI6wvb-cbI,1001 +transformers/models/qwen3_next/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen3_next/__pycache__/configuration_qwen3_next.cpython-313.pyc,, +transformers/models/qwen3_next/__pycache__/modeling_qwen3_next.cpython-313.pyc,, +transformers/models/qwen3_next/__pycache__/modular_qwen3_next.cpython-313.pyc,, +transformers/models/qwen3_next/configuration_qwen3_next.py,sha256=yZ0mSSnmJ2A9HkBPdrAsmh832ORtrlwkK5qRfd5VxHE,14521 +transformers/models/qwen3_next/modeling_qwen3_next.py,sha256=a4yIhl__ykn-rdHn0t8HmYbhlvkOcbwJhB-IUoQFGPk,56303 +transformers/models/qwen3_next/modular_qwen3_next.py,sha256=JDbCeNLr2NBtmGf90sdPDl1eJFoCQ8J4n56ErvdVMa8,36542 +transformers/models/qwen3_omni_moe/__init__.py,sha256=6xE3okskjamaUt4t3k8qJeJWbTPvLH7MfEjmtKtCC3I,1077 +transformers/models/qwen3_omni_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen3_omni_moe/__pycache__/configuration_qwen3_omni_moe.cpython-313.pyc,, +transformers/models/qwen3_omni_moe/__pycache__/modeling_qwen3_omni_moe.cpython-313.pyc,, +transformers/models/qwen3_omni_moe/__pycache__/modular_qwen3_omni_moe.cpython-313.pyc,, +transformers/models/qwen3_omni_moe/__pycache__/processing_qwen3_omni_moe.cpython-313.pyc,, +transformers/models/qwen3_omni_moe/configuration_qwen3_omni_moe.py,sha256=d5v0uBZCVEjV06L_pXko0ce3wQ4tZrCTcfzq7mK8v4k,64262 +transformers/models/qwen3_omni_moe/modeling_qwen3_omni_moe.py,sha256=4zJCMkp0NOc4-K0geHbip02VhvHuwm4TmUHjGmHIrmw,182418 
+transformers/models/qwen3_omni_moe/modular_qwen3_omni_moe.py,sha256=rn17GOgBIwoeylblfXpVUreH8XXNWvOTBqyWpLJZLJw,118939 +transformers/models/qwen3_omni_moe/processing_qwen3_omni_moe.py,sha256=mnbS3YQij9_lIAgI_kbR-9eIRFCUS17h8_6KiwPK2D8,17321 +transformers/models/qwen3_vl/__init__.py,sha256=abVaeHwwKgL-3gVI3c5PSZfroZlU4TxEKXweDd75BXQ,1104 +transformers/models/qwen3_vl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen3_vl/__pycache__/configuration_qwen3_vl.cpython-313.pyc,, +transformers/models/qwen3_vl/__pycache__/modeling_qwen3_vl.cpython-313.pyc,, +transformers/models/qwen3_vl/__pycache__/modular_qwen3_vl.cpython-313.pyc,, +transformers/models/qwen3_vl/__pycache__/processing_qwen3_vl.cpython-313.pyc,, +transformers/models/qwen3_vl/__pycache__/video_processing_qwen3_vl.cpython-313.pyc,, +transformers/models/qwen3_vl/configuration_qwen3_vl.py,sha256=F3_QpNwbCDB8CMpyzya419wCirarWXW_ZQ_BSrgTKoM,14827 +transformers/models/qwen3_vl/modeling_qwen3_vl.py,sha256=aFsQy8SFVjQ99OFZGZxYhNAtua0idkDw9PvnXd8LKpk,70883 +transformers/models/qwen3_vl/modular_qwen3_vl.py,sha256=ETqnKogl5PcMEZFv0XlhOHwe2DMurZLcOTw3zLlTg0s,68838 +transformers/models/qwen3_vl/processing_qwen3_vl.py,sha256=79jWSq9giq0f-z5tUD1qmeUifQB9-Vwdn6kF2ZjNpKk,17149 +transformers/models/qwen3_vl/video_processing_qwen3_vl.py,sha256=pJqD_EXNm6o-ORpor3l0PHDUe4bX1sgaJi_aQQT4nbc,11561 +transformers/models/qwen3_vl_moe/__init__.py,sha256=kZX59YLK-ZPItl-fKkUko_3uNM_QthuwE-rfzurn9iY,1028 +transformers/models/qwen3_vl_moe/__pycache__/__init__.cpython-313.pyc,, +transformers/models/qwen3_vl_moe/__pycache__/configuration_qwen3_vl_moe.cpython-313.pyc,, +transformers/models/qwen3_vl_moe/__pycache__/modeling_qwen3_vl_moe.cpython-313.pyc,, +transformers/models/qwen3_vl_moe/__pycache__/modular_qwen3_vl_moe.cpython-313.pyc,, +transformers/models/qwen3_vl_moe/configuration_qwen3_vl_moe.py,sha256=q6GXqyZ9B3tt-qmXd8zCIP13IOrv52l2bOMfsnAGkDI,17361 +transformers/models/qwen3_vl_moe/modeling_qwen3_vl_moe.py,sha256=uG91P3P73pyKalTzKDu0DDqrUJyd3Sc1KrPD8zBnX4M,84559 +transformers/models/qwen3_vl_moe/modular_qwen3_vl_moe.py,sha256=Lflr0PnyDE7zlLlZloo0vd1EhJN-nX7-5h477tOLU-U,26298 +transformers/models/rag/__init__.py,sha256=89sLlT4QJ96h0U-X6FmTdfSNJ8NjDjTpqyI1yK0L1Cw,1091 +transformers/models/rag/__pycache__/__init__.cpython-313.pyc,, +transformers/models/rag/__pycache__/configuration_rag.cpython-313.pyc,, +transformers/models/rag/__pycache__/modeling_rag.cpython-313.pyc,, +transformers/models/rag/__pycache__/modeling_tf_rag.cpython-313.pyc,, +transformers/models/rag/__pycache__/retrieval_rag.cpython-313.pyc,, +transformers/models/rag/__pycache__/tokenization_rag.cpython-313.pyc,, +transformers/models/rag/configuration_rag.py,sha256=dFbQO0qhT-mKYoTEmZAXlpwHSHaE4CVWNGcY7D7_yGo,8523 +transformers/models/rag/modeling_rag.py,sha256=48nUcFNh8KzGbU0r62SWaPDXwrZZbThO6nHTrfgdcE8,89183 +transformers/models/rag/modeling_tf_rag.py,sha256=bVwrivZEwtaAeuegzXIPoCC6sWcYuzYB4LGIiUaDLkQ,89318 +transformers/models/rag/retrieval_rag.py,sha256=oGsrPV81UHw2m6wIXVC7WcW8SD_lGl40L4rI5wLTwKs,30069 +transformers/models/rag/tokenization_rag.py,sha256=5UVTej-039v54SV8nC9StpNMSFMIxPCqo0srnrVsnKA,4610 +transformers/models/recurrent_gemma/__init__.py,sha256=i86Cydx-eAdwsVMjNc0yG9hGxe_amyfAdvF5Eg-UCGM,1011 +transformers/models/recurrent_gemma/__pycache__/__init__.cpython-313.pyc,, +transformers/models/recurrent_gemma/__pycache__/configuration_recurrent_gemma.cpython-313.pyc,, +transformers/models/recurrent_gemma/__pycache__/modeling_recurrent_gemma.cpython-313.pyc,, 
+transformers/models/recurrent_gemma/configuration_recurrent_gemma.py,sha256=w0mD1rnokEkBuvDNCW0mMJlO0DsF0TuG2JyJSmdqGmI,7750 +transformers/models/recurrent_gemma/modeling_recurrent_gemma.py,sha256=YY4RJaHlVmMv7KABCRyx7hyrE5_qcfHrHo2_uZ25PWM,35964 +transformers/models/reformer/__init__.py,sha256=zjiMjHIRPssQ8pVa4fQ0zMCCn0ee_mtJt6wc9J23QYQ,1084 +transformers/models/reformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/reformer/__pycache__/configuration_reformer.cpython-313.pyc,, +transformers/models/reformer/__pycache__/modeling_reformer.cpython-313.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer.cpython-313.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer_fast.cpython-313.pyc,, +transformers/models/reformer/configuration_reformer.py,sha256=MnTplogKNnkWOIGQHLRx5qmrZOBqIqXfeyLzZPy89IA,13196 +transformers/models/reformer/modeling_reformer.py,sha256=rfqhDMNL0zBOsampdV4TGTWlA08-5yNyuN8uCwrT7FU,118180 +transformers/models/reformer/tokenization_reformer.py,sha256=B5EhgmnvgvW8NiLWDq198Mh7IqUmnDYVUKoh0ECgbD4,6823 +transformers/models/reformer/tokenization_reformer_fast.py,sha256=Ow1TJe2MIatlbk0fYAfAZySEfPfWUpaAahzJvDrnAMQ,4137 +transformers/models/regnet/__init__.py,sha256=X_FU3wnZJ5KkCmRi4EyHk6ZUm_f0--YyyTS8lrknS9Y,1071 +transformers/models/regnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/regnet/__pycache__/configuration_regnet.cpython-313.pyc,, +transformers/models/regnet/__pycache__/modeling_flax_regnet.cpython-313.pyc,, +transformers/models/regnet/__pycache__/modeling_regnet.cpython-313.pyc,, +transformers/models/regnet/__pycache__/modeling_tf_regnet.cpython-313.pyc,, +transformers/models/regnet/configuration_regnet.py,sha256=TgYggQiYssFjcXjzLIe5ZDVrMxP4qQl1ZpmvZhLi2Ig,3974 +transformers/models/regnet/modeling_flax_regnet.py,sha256=ov6SXyXtkFXwNSC9quWte1vMl6AZkeJ49hlqp0l171k,28519 +transformers/models/regnet/modeling_regnet.py,sha256=9z0akvcr2eApFWLfTs8ZuLoMfLi_XgbjU0YGnULudHY,14166 +transformers/models/regnet/modeling_tf_regnet.py,sha256=Rzy-36Qzqu1wRp_qYZAfEj-5sZycNEgWQE210EBGmD8,24394 +transformers/models/rembert/__init__.py,sha256=Gif9TX1kvmD5iVWqsViSjxKYIDhR3FiBfp_QfA7U7i4,1119 +transformers/models/rembert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/rembert/__pycache__/configuration_rembert.cpython-313.pyc,, +transformers/models/rembert/__pycache__/modeling_rembert.cpython-313.pyc,, +transformers/models/rembert/__pycache__/modeling_tf_rembert.cpython-313.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert.cpython-313.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert_fast.cpython-313.pyc,, +transformers/models/rembert/configuration_rembert.py,sha256=wSBV6VLEvcKMvP2PZw6KgaqelYzyIWhK_NZDV8kzX_8,7300 +transformers/models/rembert/modeling_rembert.py,sha256=jdb2SvVe4ZrUIV8Cspq0u_hoCjrAals-MGiFd0kKbNI,58413 +transformers/models/rembert/modeling_tf_rembert.py,sha256=ChyRVbTldGrTuWayyNLjm91mS4lOK25Q4SAuNTF5Fzo,77771 +transformers/models/rembert/tokenization_rembert.py,sha256=K_x4GpkaWMCipug1ojjPMKyiPMk-JwJSXBfXiLYIWm0,9566 +transformers/models/rembert/tokenization_rembert_fast.py,sha256=7T60RZ34azXx0zfaxE8Qh-4IJOYx7M-j59QDQO5BDNE,8747 +transformers/models/resnet/__init__.py,sha256=NCgMoczDbEI_XDWkWNWKIKGPYeohOC95f0o2X-Vh2vA,1071 +transformers/models/resnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/resnet/__pycache__/configuration_resnet.cpython-313.pyc,, +transformers/models/resnet/__pycache__/modeling_flax_resnet.cpython-313.pyc,, 
+transformers/models/resnet/__pycache__/modeling_resnet.cpython-313.pyc,, +transformers/models/resnet/__pycache__/modeling_tf_resnet.cpython-313.pyc,, +transformers/models/resnet/configuration_resnet.py,sha256=RUWWvz_KwilBJVOlaY1cK0CN078VPXPwzlOs2jZmd6I,6076 +transformers/models/resnet/modeling_flax_resnet.py,sha256=2r5isBIaKQIcGeDKWFtwbsgatGZDTXu0rpEKVbeT5xE,24708 +transformers/models/resnet/modeling_resnet.py,sha256=VhLrEx4CHIk-d-sj_eR_s1g5vbZcRo-oYYILNdxF01s,16120 +transformers/models/resnet/modeling_tf_resnet.py,sha256=FS0VNZqUMq7sXABV-GE3stcUpYd2bVUGVNh2xWLJdro,23774 +transformers/models/roberta/__init__.py,sha256=p1qYu_9qpmxsxMfXuoxK-VrmRQMEshwiM8Ekoij2J1M,1160 +transformers/models/roberta/__pycache__/__init__.cpython-313.pyc,, +transformers/models/roberta/__pycache__/configuration_roberta.cpython-313.pyc,, +transformers/models/roberta/__pycache__/modeling_flax_roberta.cpython-313.pyc,, +transformers/models/roberta/__pycache__/modeling_roberta.cpython-313.pyc,, +transformers/models/roberta/__pycache__/modeling_tf_roberta.cpython-313.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta.cpython-313.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta_fast.cpython-313.pyc,, +transformers/models/roberta/configuration_roberta.py,sha256=BYTjf1lo0Mk9xYqZjNHsZSlxPG7zJPcFl8ttr6LK8Ew,7336 +transformers/models/roberta/modeling_flax_roberta.py,sha256=hW7TxugQammDOpLCTZW3X3TjcJuy81AIdDztOcxzs-A,57284 +transformers/models/roberta/modeling_roberta.py,sha256=_NGbErJ3PkPieam0RUYCEbBNxW8OZzBEpAOTES2DN54,70968 +transformers/models/roberta/modeling_tf_roberta.py,sha256=5wdoYO0aaDfd5u6iwEYjSLuhp3Mti8jODh9OdXWRQt0,79951 +transformers/models/roberta/tokenization_roberta.py,sha256=FtKax5F5Cg4uJR7aWs62l9Tp0uDcVLW2dZKfrYfarrg,16469 +transformers/models/roberta/tokenization_roberta_fast.py,sha256=JYe2lmZugU3J7PEKn_SegaFURSAPLUejM6ckH_SqWmY,10978 +transformers/models/roberta_prelayernorm/__init__.py,sha256=QsVJJaoujnLHyCgwSsz53MV88vI183tTGJNXHDCHCAc,1127 +transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/configuration_roberta_prelayernorm.cpython-313.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_flax_roberta_prelayernorm.cpython-313.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_roberta_prelayernorm.cpython-313.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_tf_roberta_prelayernorm.cpython-313.pyc,, +transformers/models/roberta_prelayernorm/configuration_roberta_prelayernorm.py,sha256=YAxqjJmTFys2-TCyo4-B8Y1mm-kF2ExOqmxiSw5i4C4,7908 +transformers/models/roberta_prelayernorm/modeling_flax_roberta_prelayernorm.py,sha256=fQzWW2x-4ffASPs5V3shK4ctvdvmBlKrqmBeP6mUsEM,60941 +transformers/models/roberta_prelayernorm/modeling_roberta_prelayernorm.py,sha256=jaVRe7lS_yui9XS7XELphAxyqGybV_OLqBz0G-pp240,66826 +transformers/models/roberta_prelayernorm/modeling_tf_roberta_prelayernorm.py,sha256=Cv6jETaFoVnYC4rbko5lk3KL11Q7CuWMqaKJlCufNqs,83224 +transformers/models/roc_bert/__init__.py,sha256=4CveMGU-dY3nV4E6x-Xpb1jicRniwrPuSOrY8-SHIUI,1038 +transformers/models/roc_bert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/roc_bert/__pycache__/configuration_roc_bert.cpython-313.pyc,, +transformers/models/roc_bert/__pycache__/modeling_roc_bert.cpython-313.pyc,, +transformers/models/roc_bert/__pycache__/tokenization_roc_bert.cpython-313.pyc,, 
+transformers/models/roc_bert/configuration_roc_bert.py,sha256=3w3t43X0ZkziGeftmlFg8yozWj57Pn5kynhbxXUkNMk,8544 +transformers/models/roc_bert/modeling_roc_bert.py,sha256=wasuviDKG6vG0R4uz52xgELoSHqjPdgCyn_ZsIV65rs,89010 +transformers/models/roc_bert/tokenization_roc_bert.py,sha256=Zo8YLFlTCOtUMFXU-et41Ktw_S61n9XRHn7DulIJ4tQ,49487 +transformers/models/roformer/__init__.py,sha256=v1CIjowYMq6aN-V9gyl-RWlMi_uQQxopuvEv76geFqk,1166 +transformers/models/roformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/roformer/__pycache__/configuration_roformer.cpython-313.pyc,, +transformers/models/roformer/__pycache__/modeling_flax_roformer.cpython-313.pyc,, +transformers/models/roformer/__pycache__/modeling_roformer.cpython-313.pyc,, +transformers/models/roformer/__pycache__/modeling_tf_roformer.cpython-313.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer.cpython-313.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer_fast.cpython-313.pyc,, +transformers/models/roformer/__pycache__/tokenization_utils.cpython-313.pyc,, +transformers/models/roformer/configuration_roformer.py,sha256=22jMpd4-nBlP7kJAlZADxFmr76zor7BRBd8orUW23go,6865 +transformers/models/roformer/modeling_flax_roformer.py,sha256=RwabkHxAdW4jApBUbJwjXESKCg-xcez9uNBAzya_xP4,39383 +transformers/models/roformer/modeling_roformer.py,sha256=lF0SdhZ8nkxqF19ph1Iy5xodfZ5lgFmlBBznY2HkXK8,65061 +transformers/models/roformer/modeling_tf_roformer.py,sha256=wevLVlaGrv6P6sP8zY3Mpg9fUsZRqb-PLRdOAT_OpWo,66049 +transformers/models/roformer/tokenization_roformer.py,sha256=Dlj00LiDLK0PSbGRlTOVpTXhYFj9yBvMqFqaEiNQSwk,20858 +transformers/models/roformer/tokenization_roformer_fast.py,sha256=BGhYYclZeX8qFSf0XOvATd9gH8rqlIqW4QCQq-umMXY,5584 +transformers/models/roformer/tokenization_utils.py,sha256=v_Qvq0uBuHpE43oIM64g9kTZcy8BD9oHhOR_ketIyIg,2625 +transformers/models/rt_detr/__init__.py,sha256=c9Y3NeKQwBP46tyFF99kjqTngoIWhLMq7XvzEJOfLaY,1181 +transformers/models/rt_detr/__pycache__/__init__.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/configuration_rt_detr.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/configuration_rt_detr_resnet.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/image_processing_rt_detr.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/image_processing_rt_detr_fast.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/modeling_rt_detr.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/modeling_rt_detr_resnet.cpython-313.pyc,, +transformers/models/rt_detr/__pycache__/modular_rt_detr.cpython-313.pyc,, +transformers/models/rt_detr/configuration_rt_detr.py,sha256=Khno06NPBiP_2Nq7_uy569OoiR6sb7vdxJ1jMVOtkCA,18295 +transformers/models/rt_detr/configuration_rt_detr_resnet.py,sha256=kBbmglFZkq0cqLsz1VZwTXVLHQnjnLjtFbkfMMbVOmM,5557 +transformers/models/rt_detr/image_processing_rt_detr.py,sha256=ayBseDPRJZX4PweZrnvR_ZIXPzo9XOdPwiZLAPGLGLA,51701 +transformers/models/rt_detr/image_processing_rt_detr_fast.py,sha256=hTOTc06ZliEDV9bkuB4iik-rBrj2f_6UvIpiT7NbxqE,24944 +transformers/models/rt_detr/modeling_rt_detr.py,sha256=W7rxYPOAwKzIAU1b1g0FLIW992bowryp1a4HOzYamnw,95382 +transformers/models/rt_detr/modeling_rt_detr_resnet.py,sha256=xlET8ddRbxBzLz2uH4ks_9wuqNPbpj-uanOd0gOp9Dg,15149 +transformers/models/rt_detr/modular_rt_detr.py,sha256=U5As4Jc341eUEmVdaRFGkeY6udhMM3PzGiseIqEbzyc,14885 +transformers/models/rt_detr_v2/__init__.py,sha256=7RL5U-hsGt3HQZ5SuWn8iZY_L166EYswBvaQXFRkzRc,1003 +transformers/models/rt_detr_v2/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/rt_detr_v2/__pycache__/configuration_rt_detr_v2.cpython-313.pyc,, +transformers/models/rt_detr_v2/__pycache__/modeling_rt_detr_v2.cpython-313.pyc,, +transformers/models/rt_detr_v2/__pycache__/modular_rt_detr_v2.cpython-313.pyc,, +transformers/models/rt_detr_v2/configuration_rt_detr_v2.py,sha256=IjGQgzggWMu4uGoPEn8DHMc00lGJCdTcu2fvZ6NamQs,19837 +transformers/models/rt_detr_v2/modeling_rt_detr_v2.py,sha256=mYzHhnFEJzXvKYhNty5N5ZuS2wKluehRD7PR-ocUkT4,96324 +transformers/models/rt_detr_v2/modular_rt_detr_v2.py,sha256=oFEBxGHsltolRP9fSFLRb_4j2-N22fXAbtwPP8SL8jw,29745 +transformers/models/rwkv/__init__.py,sha256=HAiwEvW1j_xuHj_PbmN25srY9RtA1gLmN_0RWvAyG78,989 +transformers/models/rwkv/__pycache__/__init__.cpython-313.pyc,, +transformers/models/rwkv/__pycache__/configuration_rwkv.cpython-313.pyc,, +transformers/models/rwkv/__pycache__/modeling_rwkv.cpython-313.pyc,, +transformers/models/rwkv/configuration_rwkv.py,sha256=0hwiEhaLNCekxOiYD_D-e95ftq7_aazx9ImRtf0ydWc,5204 +transformers/models/rwkv/modeling_rwkv.py,sha256=EkHzVtu8p87JcT-rymah94GGnpl_vdWpK0-XGTlt47w,33588 +transformers/models/sam/__init__.py,sha256=ilUO6W284DgX2BijkzdGXaw-OZsSrEo-qjZqiidfOEY,1141 +transformers/models/sam/__pycache__/__init__.cpython-313.pyc,, +transformers/models/sam/__pycache__/configuration_sam.cpython-313.pyc,, +transformers/models/sam/__pycache__/image_processing_sam.cpython-313.pyc,, +transformers/models/sam/__pycache__/image_processing_sam_fast.cpython-313.pyc,, +transformers/models/sam/__pycache__/modeling_sam.cpython-313.pyc,, +transformers/models/sam/__pycache__/modeling_tf_sam.cpython-313.pyc,, +transformers/models/sam/__pycache__/processing_sam.cpython-313.pyc,, +transformers/models/sam/configuration_sam.py,sha256=q6C10OyYdHicWRfuaudnl_5K9I3LH8nPQj_1wXPGegw,14716 +transformers/models/sam/image_processing_sam.py,sha256=jJZT4t9zIvLgPYpXh1Eh2I19Zkw_ABMKrYZYhJI3EU4,67970 +transformers/models/sam/image_processing_sam_fast.py,sha256=ZTlVrL-tTxZuJnxRQQ2pCVPt_exhLx4zpQuvfBMTEM0,31316 +transformers/models/sam/modeling_sam.py,sha256=APIBRUGjEMfm95l7VjIMRBoepSAxwAGgnaOzjkZ_zvE,61717 +transformers/models/sam/modeling_tf_sam.py,sha256=oUYzLUHdtOgEcoS6wzkV9MDmU12UwXwTGOiAQiV-2WA,77733 +transformers/models/sam/processing_sam.py,sha256=3Cn9YnVdfmCi-HSvMubnW9vHNfOKMnzhTwQO7Fui6nk,12118 +transformers/models/sam2/__init__.py,sha256=H7LrRCnUmUZ9fj5C_p_Mp8xNB5Zdtoo1S__LcYcs_i4,1070 +transformers/models/sam2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/sam2/__pycache__/configuration_sam2.cpython-313.pyc,, +transformers/models/sam2/__pycache__/image_processing_sam2_fast.cpython-313.pyc,, +transformers/models/sam2/__pycache__/modeling_sam2.cpython-313.pyc,, +transformers/models/sam2/__pycache__/modular_sam2.cpython-313.pyc,, +transformers/models/sam2/__pycache__/processing_sam2.cpython-313.pyc,, +transformers/models/sam2/configuration_sam2.py,sha256=VqEXGJqFzfy8PhpwGn4U9O67ZwSN2fpnb3HHwZ4FWgY,20600 +transformers/models/sam2/image_processing_sam2_fast.py,sha256=7UzCnCfUF5XUfpYDziwCY32o5ONpG-noTwM92WiYuFk,30796 +transformers/models/sam2/modeling_sam2.py,sha256=ysOVyb11gZGDM2fOcWLDht746ZIwtb9CvEfIzDPn-M0,72882 +transformers/models/sam2/modular_sam2.py,sha256=cbRc8dED42Ja8FpwSBwd6MITuDmMpEoLuXd4bgZs7sc,64623 +transformers/models/sam2/processing_sam2.py,sha256=viGjbI1aZKD7tWHAWaVKTZ0xiyigctODUPJ1ESRcnVI,23039 +transformers/models/sam2_video/__init__.py,sha256=WFP70wbKsoPQuuR0Aq6sMpFZ9m2cQJwNgdCpngg4C1o,1089 +transformers/models/sam2_video/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/sam2_video/__pycache__/configuration_sam2_video.cpython-313.pyc,, +transformers/models/sam2_video/__pycache__/modeling_sam2_video.cpython-313.pyc,, +transformers/models/sam2_video/__pycache__/modular_sam2_video.cpython-313.pyc,, +transformers/models/sam2_video/__pycache__/processing_sam2_video.cpython-313.pyc,, +transformers/models/sam2_video/__pycache__/video_processing_sam2_video.cpython-313.pyc,, +transformers/models/sam2_video/configuration_sam2_video.py,sha256=u5wSunYToJaU9nC7dVigFo0dDkiBf8KpjWo76K9HFzo,20482 +transformers/models/sam2_video/modeling_sam2_video.py,sha256=2FroEz3fIEAHKKMvLj5Tw9fFAr4GWXAghcuPZkgLbwM,125463 +transformers/models/sam2_video/modular_sam2_video.py,sha256=MdDAnVjIz_JLwNzeAoeR166HUI9lngsAbzg8juCyREU,117324 +transformers/models/sam2_video/processing_sam2_video.py,sha256=3qlms1YII2vownAjpjVhwV7IuiMS7ikYScCFzZk-HeA,38301 +transformers/models/sam2_video/video_processing_sam2_video.py,sha256=p1CaA_NGuAePgso2n0hKQuoizYNxvX6zafSlz65J7dg,4899 +transformers/models/sam_hq/__init__.py,sha256=DtfMcRDroMaiZ9FKrgymx4rCyGuP5r1dxr-wzjS0T0Q,1029 +transformers/models/sam_hq/__pycache__/__init__.cpython-313.pyc,, +transformers/models/sam_hq/__pycache__/configuration_sam_hq.cpython-313.pyc,, +transformers/models/sam_hq/__pycache__/modeling_sam_hq.cpython-313.pyc,, +transformers/models/sam_hq/__pycache__/modular_sam_hq.cpython-313.pyc,, +transformers/models/sam_hq/__pycache__/processing_samhq.cpython-313.pyc,, +transformers/models/sam_hq/configuration_sam_hq.py,sha256=U3nPcw9lh2M6MdBMe7rK_xniKVPVoRKzgqvPgU-7kzo,14840 +transformers/models/sam_hq/modeling_sam_hq.py,sha256=q5dW1MJqDv9yNpFcDXPrOXG4DRqAbqO-p0za1F1i1iE,69300 +transformers/models/sam_hq/modular_sam_hq.py,sha256=6NxL8hQd7wigyRqr5Z2Ci7f_G8Wnv-_xIdty-les2m4,31609 +transformers/models/sam_hq/processing_samhq.py,sha256=5audIn1A3v9gU6zM90o8oLF5YM_PA3QWR-3aFReFcFA,12020 +transformers/models/seamless_m4t/__init__.py,sha256=Y5c_W1E83fh8ToTMqF4NcReXzKZiTDv3A4ePoNUxXDg,1194 +transformers/models/seamless_m4t/__pycache__/__init__.cpython-313.pyc,, +transformers/models/seamless_m4t/__pycache__/configuration_seamless_m4t.cpython-313.pyc,, +transformers/models/seamless_m4t/__pycache__/feature_extraction_seamless_m4t.cpython-313.pyc,, +transformers/models/seamless_m4t/__pycache__/modeling_seamless_m4t.cpython-313.pyc,, +transformers/models/seamless_m4t/__pycache__/processing_seamless_m4t.cpython-313.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t.cpython-313.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t_fast.cpython-313.pyc,, +transformers/models/seamless_m4t/configuration_seamless_m4t.py,sha256=zvCh7r-KOILwOlujAaJLtv1-Z7B8XgZEfxqYuBMjGK0,23521 +transformers/models/seamless_m4t/feature_extraction_seamless_m4t.py,sha256=VeMODyxbmdHC7O2gb_VOzXO8_cz7gktCWEYIVPmscWQ,13628 +transformers/models/seamless_m4t/modeling_seamless_m4t.py,sha256=gc96yE1P9dMRaKxfgUuYdIfFxz0tnUudRjDTHn1laO0,187012 +transformers/models/seamless_m4t/processing_seamless_m4t.py,sha256=ZoZ_rJ0G2UNhLDQ4INAjnmB1BZWHtEBsLjGJxjIvACg,5039 +transformers/models/seamless_m4t/tokenization_seamless_m4t.py,sha256=vRb1fQgr6HzLT5rnhE7dDqAuC5_yT5DCIftiBSZIIak,26076 +transformers/models/seamless_m4t/tokenization_seamless_m4t_fast.py,sha256=NC5GbGQnDiLf9NB6-XQ4DqJjLFQWJ7_0yJXoDthY-z0,19774 +transformers/models/seamless_m4t_v2/__init__.py,sha256=mMY04PBMrOwTIQLq01RHqZjssvrSYl3UDhP5Y5vFifs,1011 +transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/seamless_m4t_v2/__pycache__/configuration_seamless_m4t_v2.cpython-313.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-313.pyc,, +transformers/models/seamless_m4t_v2/configuration_seamless_m4t_v2.py,sha256=ihF9DjqhoOojebAUTijxbQuLNs0nEqJBi9umyR-gHgA,24388 +transformers/models/seamless_m4t_v2/modeling_seamless_m4t_v2.py,sha256=gcA91JUtgk5WTM-Tt8FJXiaLZLUyKnbQ2Y7cokbEcRk,206172 +transformers/models/seed_oss/__init__.py,sha256=ukqpfG-W3jFJvA9UiL5bI_2jl_CTZFynUL0Tq9WD63M,1025 +transformers/models/seed_oss/__pycache__/__init__.cpython-313.pyc,, +transformers/models/seed_oss/__pycache__/configuration_seed_oss.cpython-313.pyc,, +transformers/models/seed_oss/__pycache__/modeling_seed_oss.cpython-313.pyc,, +transformers/models/seed_oss/__pycache__/modular_seed_oss.cpython-313.pyc,, +transformers/models/seed_oss/configuration_seed_oss.py,sha256=V_XjuaOrTU7hyAMvrsAQg-Vvmm3exGr-IX9uPgELdH0,12107 +transformers/models/seed_oss/modeling_seed_oss.py,sha256=5eMCbxE7t0yr0R7hk9bMjVaZP28sMZ6pKPfZRAZ29yU,22011 +transformers/models/seed_oss/modular_seed_oss.py,sha256=kaPESWCrk1VEjMfsy6BoZRDu_JOVs5WwkRZh4gkPwMA,7688 +transformers/models/segformer/__init__.py,sha256=KusPvx7i3IW86Z3RjiVjo3oNu-SsqwmkifgegRkSzKs,1185 +transformers/models/segformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/segformer/__pycache__/configuration_segformer.cpython-313.pyc,, +transformers/models/segformer/__pycache__/feature_extraction_segformer.cpython-313.pyc,, +transformers/models/segformer/__pycache__/image_processing_segformer.cpython-313.pyc,, +transformers/models/segformer/__pycache__/image_processing_segformer_fast.cpython-313.pyc,, +transformers/models/segformer/__pycache__/modeling_segformer.cpython-313.pyc,, +transformers/models/segformer/__pycache__/modeling_tf_segformer.cpython-313.pyc,, +transformers/models/segformer/__pycache__/modular_segformer.cpython-313.pyc,, +transformers/models/segformer/configuration_segformer.py,sha256=CyvERUX7QyWRo0mG7jwst-3jIqzOyD6YQ2ZdCkoTY_g,7429 +transformers/models/segformer/feature_extraction_segformer.py,sha256=7JHygTUT0pw36WCxl14kgaPvxqNBPFf9AOUuxQKHhlg,1324 +transformers/models/segformer/image_processing_segformer.py,sha256=4zstqJfk235643AT0ewDya4hhI9z5f99reO0-Kj1L_k,22252 +transformers/models/segformer/image_processing_segformer_fast.py,sha256=szsb6m7-mdAJeYYTGqJYHVNU4HR80vCNj9lr1Rs3VKE,10074 +transformers/models/segformer/modeling_segformer.py,sha256=r54Pc20Bl-tPOWCrU85ZaKUipKCCXwMAC-k1TE85Xxw,31598 +transformers/models/segformer/modeling_tf_segformer.py,sha256=99M87FnHfxDikZXn-2PmCnGiPEcTW4kxQpWC1tTaQw8,43687 +transformers/models/segformer/modular_segformer.py,sha256=82GCRkfnIsSv1faNznK8-w4TySJ3p4ofIaIGVE7MCGw,5827 +transformers/models/seggpt/__init__.py,sha256=RzV8DKCX1lOWGqXv2BlE1R7T4QuEcdYAVy_csccLvEw,1036 +transformers/models/seggpt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/seggpt/__pycache__/configuration_seggpt.cpython-313.pyc,, +transformers/models/seggpt/__pycache__/image_processing_seggpt.cpython-313.pyc,, +transformers/models/seggpt/__pycache__/modeling_seggpt.cpython-313.pyc,, +transformers/models/seggpt/configuration_seggpt.py,sha256=VeHoocblt-EIRF-FO8JkrVGUcBPKbXPWAJMlXRllz44,6492 +transformers/models/seggpt/image_processing_seggpt.py,sha256=2hctN6Au2N4H8-2Kv7w0GIsh8Am6d3O5z4i7yp5HySE,31505 +transformers/models/seggpt/modeling_seggpt.py,sha256=eQgzzo7KzBk9Z7SPAl9U0IUAPnz_6WN_aPfZwjDYyJE,44650 
+transformers/models/sew/__init__.py,sha256=POCF36ZRa_dr7oQhkDU2X17bsZuLoWI5V8DSihqr_vU,987 +transformers/models/sew/__pycache__/__init__.cpython-313.pyc,, +transformers/models/sew/__pycache__/configuration_sew.cpython-313.pyc,, +transformers/models/sew/__pycache__/feature_extraction_sew.cpython-313.pyc,, +transformers/models/sew/__pycache__/modeling_sew.cpython-313.pyc,, +transformers/models/sew/__pycache__/modular_sew.cpython-313.pyc,, +transformers/models/sew/configuration_sew.py,sha256=lJIjv2Ktr3PexNd9E8Q2cb3ewzh4W10eCk5M4d-JOyc,14231 +transformers/models/sew/feature_extraction_sew.py,sha256=szQVJwMsEe-9xZxeHiUFb5E-JFHc6gfTQNqwPp_kLiU,1883 +transformers/models/sew/modeling_sew.py,sha256=kYPlJjQM-hfUr9UCgsz1Rk8xlIuItUKvulvo93CA5es,47441 +transformers/models/sew/modular_sew.py,sha256=umXuB4_mJgHOy_4O4wsHUyocemK9WcQVTVOmQvPCdgc,18883 +transformers/models/sew_d/__init__.py,sha256=zE9sw10e_a1d-8-Jsb75z5frCjkFGD0dZMHAXiNgGwk,991 +transformers/models/sew_d/__pycache__/__init__.cpython-313.pyc,, +transformers/models/sew_d/__pycache__/configuration_sew_d.cpython-313.pyc,, +transformers/models/sew_d/__pycache__/modeling_sew_d.cpython-313.pyc,, +transformers/models/sew_d/configuration_sew_d.py,sha256=UXLYoDAIix9cWhSQVuavz4LgbQ2lvCxO5vE_bGsZei0,16191 +transformers/models/sew_d/modeling_sew_d.py,sha256=p2CuIOUUolce5x29v7yRqujC5p6a8QF5qQCOe6XJivg,69121 +transformers/models/shieldgemma2/__init__.py,sha256=B7eqFJSWi0p49QNvKqUGR8NPyFjQuMdBANevIjTsSxw,1048 +transformers/models/shieldgemma2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/shieldgemma2/__pycache__/configuration_shieldgemma2.cpython-313.pyc,, +transformers/models/shieldgemma2/__pycache__/modeling_shieldgemma2.cpython-313.pyc,, +transformers/models/shieldgemma2/__pycache__/processing_shieldgemma2.cpython-313.pyc,, +transformers/models/shieldgemma2/configuration_shieldgemma2.py,sha256=Cj4MM47fvnQVGqzEXeidaqrkSmz4-i11mYvz6yj4dvU,4805 +transformers/models/shieldgemma2/modeling_shieldgemma2.py,sha256=gI8F9mRgE3zPjdUcRD4kTpOvNvCgEM7Er3y8Y3i_POE,6039 +transformers/models/shieldgemma2/processing_shieldgemma2.py,sha256=vP_TLeEuh5MYgxGqc2P3Z9Z4CXuPJQd34QX7xkMYkvc,8380 +transformers/models/siglip/__init__.py,sha256=CnNqbSQ25tKLz0MGJVmhSXjVyASRDu7v5yjTHWYZ6M4,1160 +transformers/models/siglip/__pycache__/__init__.cpython-313.pyc,, +transformers/models/siglip/__pycache__/configuration_siglip.cpython-313.pyc,, +transformers/models/siglip/__pycache__/image_processing_siglip.cpython-313.pyc,, +transformers/models/siglip/__pycache__/image_processing_siglip_fast.cpython-313.pyc,, +transformers/models/siglip/__pycache__/modeling_siglip.cpython-313.pyc,, +transformers/models/siglip/__pycache__/processing_siglip.cpython-313.pyc,, +transformers/models/siglip/__pycache__/tokenization_siglip.cpython-313.pyc,, +transformers/models/siglip/configuration_siglip.py,sha256=3GVhImzaW3WCFyQ3IfC-ZM3ZA2qmvA_BEjmZyNOQyfk,11697 +transformers/models/siglip/image_processing_siglip.py,sha256=9obAmCFLGUqI-5cOZl_YbNyeMi001rA1asw25qH2sT4,12084 +transformers/models/siglip/image_processing_siglip_fast.py,sha256=3dwic9zjpzgxbCnQC84lUvcDOSnWIlEZrCUJItmi474,1257 +transformers/models/siglip/modeling_siglip.py,sha256=5HBtf1EAyqu7SwcyM_4xMYGqtMWjX6txxvV76ah4iiI,42079 +transformers/models/siglip/processing_siglip.py,sha256=Oit8g6wlMxBCp9fh6qvSuNZCQcQxZBlsoW6a77HjzC4,1594 +transformers/models/siglip/tokenization_siglip.py,sha256=QXJ1RdlwC6ESO0z3thY-tzXY5JL0VEsMBsWi-OgA4Vg,16047 +transformers/models/siglip2/__init__.py,sha256=dvfEVdLNJzWjugwTPGNp1gfxA6x4ytFLgGtV4Zfhoh4,1126 
+transformers/models/siglip2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/siglip2/__pycache__/configuration_siglip2.cpython-313.pyc,, +transformers/models/siglip2/__pycache__/image_processing_siglip2.cpython-313.pyc,, +transformers/models/siglip2/__pycache__/image_processing_siglip2_fast.cpython-313.pyc,, +transformers/models/siglip2/__pycache__/modeling_siglip2.cpython-313.pyc,, +transformers/models/siglip2/__pycache__/modular_siglip2.cpython-313.pyc,, +transformers/models/siglip2/__pycache__/processing_siglip2.cpython-313.pyc,, +transformers/models/siglip2/configuration_siglip2.py,sha256=zs0zqZ9Tog9iwLK6yEN6peJErYCHN5N0iBB11Lp0FpA,12819 +transformers/models/siglip2/image_processing_siglip2.py,sha256=CgxErsXcPSf9OVsc-tXUr85w1l9Qm37gqc_zeo8CgFg,16118 +transformers/models/siglip2/image_processing_siglip2_fast.py,sha256=VexDWmZAXYBB6Pl8rX4mo9pfGdpXbl9PgLi8-lERUek,6233 +transformers/models/siglip2/modeling_siglip2.py,sha256=1PKsMaZXJbp_0LpqgmPwbBedgyEx701tNmnlk-oqbcE,48353 +transformers/models/siglip2/modular_siglip2.py,sha256=vtCU0qpFidhEXkTPMjutIv29DkrTBy4s-OjAGPBXxG0,25104 +transformers/models/siglip2/processing_siglip2.py,sha256=9ogRb_M32f_6UxB_6GZqf2a0oN_h9zY3c96GWBLiYDY,2178 +transformers/models/smollm3/__init__.py,sha256=BZA2MiDpGmv2swg1yO14tkgi_SZ0yVg8ndr-PJwY-fI,1000 +transformers/models/smollm3/__pycache__/__init__.cpython-313.pyc,, +transformers/models/smollm3/__pycache__/configuration_smollm3.cpython-313.pyc,, +transformers/models/smollm3/__pycache__/modeling_smollm3.cpython-313.pyc,, +transformers/models/smollm3/__pycache__/modular_smollm3.cpython-313.pyc,, +transformers/models/smollm3/configuration_smollm3.py,sha256=T-xosij-x2fdyPjNTED46vN0w8RL84wM8WGDgsdzHN8,13374 +transformers/models/smollm3/modeling_smollm3.py,sha256=p-OQ5bjuKILNNzhTesKZEJTM_-XlIoZwVOxK612Dw4k,22493 +transformers/models/smollm3/modular_smollm3.py,sha256=uab9U7hYXzDjuRWHLaUrgR4L9M7x3lcqrdgxgVmYEUg,16418 +transformers/models/smolvlm/__init__.py,sha256=fE-znTLrbxNe5qkHVgI4xmwFz-paFymZuxTAd2GKkOo,1126 +transformers/models/smolvlm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/configuration_smolvlm.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/image_processing_smolvlm.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/image_processing_smolvlm_fast.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/modeling_smolvlm.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/modular_smolvlm.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/processing_smolvlm.cpython-313.pyc,, +transformers/models/smolvlm/__pycache__/video_processing_smolvlm.cpython-313.pyc,, +transformers/models/smolvlm/configuration_smolvlm.py,sha256=4XHkP6aHHeBOGOoLNn87AuAp5csfYpWo8QKW8TrauYI,9359 +transformers/models/smolvlm/image_processing_smolvlm.py,sha256=SLYyIc-WYrgw-g_-sADEJmhzPhAFww3RbWVq_P6NHsc,44554 +transformers/models/smolvlm/image_processing_smolvlm_fast.py,sha256=q-r6uelXrWE4LAxIWH4YdSAiq66RtAFyc5HMd2WoO48,23780 +transformers/models/smolvlm/modeling_smolvlm.py,sha256=LqZ0Hnhh1Tx1Gpxmlgljv_gdO2ZQ3w6pnKXKnjo_kPs,41990 +transformers/models/smolvlm/modular_smolvlm.py,sha256=kRdro6czM8tqsHyiXz8NjIY8yni4ng8bh-78XfIHYC0,19040 +transformers/models/smolvlm/processing_smolvlm.py,sha256=LEtjQgbvX76FlfrqkHIN12MTJ-o_gXWr00MG-lQb-rs,20291 +transformers/models/smolvlm/video_processing_smolvlm.py,sha256=TzjLnFdGMR6Umz-rR3yVgsL8aTBp-n5yPxVrnqfdsgw,14874 +transformers/models/speech_encoder_decoder/__init__.py,sha256=0MwevN904dCSAb0dvznhDH--q-m3-MzdCtx0B-T5hpk,1081 
+transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-313.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/configuration_speech_encoder_decoder.cpython-313.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_flax_speech_encoder_decoder.cpython-313.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_speech_encoder_decoder.cpython-313.pyc,, +transformers/models/speech_encoder_decoder/configuration_speech_encoder_decoder.py,sha256=2FBAuqwi4KNkbb7chAliDYZ46vYJiIjEwqtSh1oFSKY,4693 +transformers/models/speech_encoder_decoder/modeling_flax_speech_encoder_decoder.py,sha256=KCyHnYvQdlpFChAzDCgDs2UfY546A0i01U8e1YYGT0Q,44860 +transformers/models/speech_encoder_decoder/modeling_speech_encoder_decoder.py,sha256=IA54yPvoAUeirx9dPHtoo-LUaWKPER6aVHMBp7BFqP0,25575 +transformers/models/speech_to_text/__init__.py,sha256=qZzt5u1rbSsOjPVmX40R4b4pkL1mxOQZ66q8GPDKao8,1200 +transformers/models/speech_to_text/__pycache__/__init__.cpython-313.pyc,, +transformers/models/speech_to_text/__pycache__/configuration_speech_to_text.cpython-313.pyc,, +transformers/models/speech_to_text/__pycache__/feature_extraction_speech_to_text.cpython-313.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_speech_to_text.cpython-313.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_tf_speech_to_text.cpython-313.pyc,, +transformers/models/speech_to_text/__pycache__/processing_speech_to_text.cpython-313.pyc,, +transformers/models/speech_to_text/__pycache__/tokenization_speech_to_text.cpython-313.pyc,, +transformers/models/speech_to_text/configuration_speech_to_text.py,sha256=YazgmnHYt_lbYaRQNt2dEYAqgpAfYln36hQl89WPLF4,9825 +transformers/models/speech_to_text/feature_extraction_speech_to_text.py,sha256=r6vxaMiZM7KBxghUnd_WnDJ4brms95HimGR0bkzvglw,13955 +transformers/models/speech_to_text/modeling_speech_to_text.py,sha256=dcolyIrurG9SojhUohBDO45OHi-C_kI2x4TRW0mvABY,61717 +transformers/models/speech_to_text/modeling_tf_speech_to_text.py,sha256=-qOZso75ib0DJDvIGE4NcnQdP3Q97uALIiUqlRd5-qc,74289 +transformers/models/speech_to_text/processing_speech_to_text.py,sha256=jg7WkcQYWW00ndyN3PUo98EfswLlNNUc0izl1Iphy5E,4240 +transformers/models/speech_to_text/tokenization_speech_to_text.py,sha256=4VRmhzHQZvrplhvYdQFMN8v9xAWmz0UFW4TY9pI7ygw,11501 +transformers/models/speecht5/__init__.py,sha256=DploRLnZX4ZO40Z7BstCZ7aNWGuZE06tIeMo0GTyR60,1124 +transformers/models/speecht5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/speecht5/__pycache__/configuration_speecht5.cpython-313.pyc,, +transformers/models/speecht5/__pycache__/feature_extraction_speecht5.cpython-313.pyc,, +transformers/models/speecht5/__pycache__/modeling_speecht5.cpython-313.pyc,, +transformers/models/speecht5/__pycache__/number_normalizer.cpython-313.pyc,, +transformers/models/speecht5/__pycache__/processing_speecht5.cpython-313.pyc,, +transformers/models/speecht5/__pycache__/tokenization_speecht5.cpython-313.pyc,, +transformers/models/speecht5/configuration_speecht5.py,sha256=q6TwW_M__spcgD9NOObNlcOZt8_xvL6V5Qm1yQZ1T1I,23466 +transformers/models/speecht5/feature_extraction_speecht5.py,sha256=MqnjfL1S_-VCnJhEEWgfv9kAYp2_nlO9MZ9OtsESJX0,17853 +transformers/models/speecht5/modeling_speecht5.py,sha256=Vd5tEqXzZ8SjTDLV7RJ6jiZXy2JnxMG2QHVPkFxkNyI,147725 +transformers/models/speecht5/number_normalizer.py,sha256=cxnEUdHSISW5eAo15cLuVkZa65zMFuMFaJ8zAOQCsAA,7019 +transformers/models/speecht5/processing_speecht5.py,sha256=PvI7tFsx_a_39n1TME0uDQSqQS3J9neLLFPwbrn40C0,6990 
+transformers/models/speecht5/tokenization_speecht5.py,sha256=UfqBVrMUHCRRY_-kVvcU3RJnEfdAlq40ooE0UqV40ps,9009 +transformers/models/splinter/__init__.py,sha256=N3tdgJIqZRPK0g3pfLE3p3-HkGJMRf-GQ189anQ51to,1084 +transformers/models/splinter/__pycache__/__init__.cpython-313.pyc,, +transformers/models/splinter/__pycache__/configuration_splinter.cpython-313.pyc,, +transformers/models/splinter/__pycache__/modeling_splinter.cpython-313.pyc,, +transformers/models/splinter/__pycache__/tokenization_splinter.cpython-313.pyc,, +transformers/models/splinter/__pycache__/tokenization_splinter_fast.cpython-313.pyc,, +transformers/models/splinter/configuration_splinter.py,sha256=ZajZPX6f9K7gBqp2PbOtmJg-_fAU8h72tKdTNjyQV0M,5625 +transformers/models/splinter/modeling_splinter.py,sha256=TdgEoZC-qk2GXnaLhX_PWTY15CK0KGvrJhvtGF4_Ce0,37981 +transformers/models/splinter/tokenization_splinter.py,sha256=ba400uZlqtB6kjdMI3oMgBWs_p-px81zAQItZQx-l6c,20948 +transformers/models/splinter/tokenization_splinter_fast.py,sha256=AG6k691a2HJqtIAiEyn07WuSc4JgqU1HTkfEGC8Tt2c,8590 +transformers/models/squeezebert/__init__.py,sha256=_kzQtfoJetCK99e_FICGZl5DN8S2VVcOUFioGyN0sLI,1096 +transformers/models/squeezebert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/squeezebert/__pycache__/configuration_squeezebert.cpython-313.pyc,, +transformers/models/squeezebert/__pycache__/modeling_squeezebert.cpython-313.pyc,, +transformers/models/squeezebert/__pycache__/tokenization_squeezebert.cpython-313.pyc,, +transformers/models/squeezebert/__pycache__/tokenization_squeezebert_fast.cpython-313.pyc,, +transformers/models/squeezebert/configuration_squeezebert.py,sha256=9I4mUuqEwKAIrezRjjto3HBfJ-aiWBTkQcIZWuJFFGM,7312 +transformers/models/squeezebert/modeling_squeezebert.py,sha256=XNcULy9rowxidymjHb8D3OWWVFaTsC9rHLPmJQsd5rQ,38738 +transformers/models/squeezebert/tokenization_squeezebert.py,sha256=4ZbPoFCjrt-d2zdbZ9s23IHOsN5iw9VpG11W4sJWCJM,20092 +transformers/models/squeezebert/tokenization_squeezebert_fast.py,sha256=CtqOSIMAYhS66yYjbWxTDpbbxNvAllTkP_lJroEyvfQ,6724 +transformers/models/stablelm/__init__.py,sha256=aVgWTcwBuuiGJDp8H_ZU6BvhYqjmNEqCukU7jEfwd_I,997 +transformers/models/stablelm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/stablelm/__pycache__/configuration_stablelm.cpython-313.pyc,, +transformers/models/stablelm/__pycache__/modeling_stablelm.cpython-313.pyc,, +transformers/models/stablelm/configuration_stablelm.py,sha256=GqKL53dgijlmwQMgLsuw4jB6gm1stU4SaCETjqumKVs,10843 +transformers/models/stablelm/modeling_stablelm.py,sha256=DmRces0LIF0s84vdEPmdpaKCSajxP_nKfhEt88Hfsh0,45059 +transformers/models/starcoder2/__init__.py,sha256=fZ8HHZCGjxRfVgROe7zuoi9ADIAa4SeqxGHkvKUQiQM,1001 +transformers/models/starcoder2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/starcoder2/__pycache__/configuration_starcoder2.cpython-313.pyc,, +transformers/models/starcoder2/__pycache__/modeling_starcoder2.cpython-313.pyc,, +transformers/models/starcoder2/__pycache__/modular_starcoder2.cpython-313.pyc,, +transformers/models/starcoder2/configuration_starcoder2.py,sha256=o7FQ_LonUW6vTd8nNQuM44S_-0ypa21kwluLvrYZ3_k,10898 +transformers/models/starcoder2/modeling_starcoder2.py,sha256=LDaHFGV7cmONVhAyIe8lyV-QSntzGQQqM1JOtaqdVRU,21539 +transformers/models/starcoder2/modular_starcoder2.py,sha256=8cgY_xcPr3UBZ0NSmbvcg3KAomiDK-Y1WashMOAFBvM,9877 +transformers/models/superglue/__init__.py,sha256=Sg_nfSbBltkVhp40pVc04SthUCnXMX3kWHH_qC_YL4Y,1045 +transformers/models/superglue/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/superglue/__pycache__/configuration_superglue.cpython-313.pyc,, +transformers/models/superglue/__pycache__/image_processing_superglue.cpython-313.pyc,, +transformers/models/superglue/__pycache__/modeling_superglue.cpython-313.pyc,, +transformers/models/superglue/configuration_superglue.py,sha256=WD-orzD73GN0s_3On6qcv0jRS7PwC1yjgNBPNnXpOzc,5473 +transformers/models/superglue/image_processing_superglue.py,sha256=eKPx9qButG26SGKLQ5zaQQKjxQmM2ZDXiQ0b7_51D38,21954 +transformers/models/superglue/modeling_superglue.py,sha256=bBVf-RB3Vp2YO5dnlTfeLCBLcGkI7jCUSx5nad4jTIA,36118 +transformers/models/superpoint/__init__.py,sha256=6fQQ-p4220IUaIQCZseKItiHWVV7KOiA5mXoTdJSJmI,1100 +transformers/models/superpoint/__pycache__/__init__.cpython-313.pyc,, +transformers/models/superpoint/__pycache__/configuration_superpoint.cpython-313.pyc,, +transformers/models/superpoint/__pycache__/image_processing_superpoint.cpython-313.pyc,, +transformers/models/superpoint/__pycache__/image_processing_superpoint_fast.cpython-313.pyc,, +transformers/models/superpoint/__pycache__/modeling_superpoint.cpython-313.pyc,, +transformers/models/superpoint/configuration_superpoint.py,sha256=wWW7CLDM2VW-f41M_hLvq4N3j1gt_4QmsaNHifKLd_I,4048 +transformers/models/superpoint/image_processing_superpoint.py,sha256=5eUYxWamNm6Jm_mEhfJQYs2yxFH5AKgaM6XheXLXvBE,16414 +transformers/models/superpoint/image_processing_superpoint_fast.py,sha256=LxdFQ7JrXE1nAcB93KJqTkJayTZ4MuQnAwJqIHXUZ5Y,6837 +transformers/models/superpoint/modeling_superpoint.py,sha256=iqN6g4a2Yt8I-tantOd66ajRukiCzl1PcLFUIMDv71I,20071 +transformers/models/swiftformer/__init__.py,sha256=cW3-9efPxdjZV1KziM8j1S8e8wH3wJQhWqMXlULhG6c,1046 +transformers/models/swiftformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/swiftformer/__pycache__/configuration_swiftformer.cpython-313.pyc,, +transformers/models/swiftformer/__pycache__/modeling_swiftformer.cpython-313.pyc,, +transformers/models/swiftformer/__pycache__/modeling_tf_swiftformer.cpython-313.pyc,, +transformers/models/swiftformer/configuration_swiftformer.py,sha256=zc6uduEgdxwc_ApNrsADFqGSKC1qlH-kceHSuUWHQCI,5867 +transformers/models/swiftformer/modeling_swiftformer.py,sha256=p3RkkRJsjzLOC835594Q94T2tAv-91nG4V8cc8nk4v4,19526 +transformers/models/swiftformer/modeling_tf_swiftformer.py,sha256=BtStZyRloUcoWu3km_TLihPzMPiXA5QT88WQthuMS8Q,34959 +transformers/models/swin/__init__.py,sha256=7pcdahUG9WcEkEDRoUcMVxdonKglhOpXaQLo8xI6KTg,1025 +transformers/models/swin/__pycache__/__init__.cpython-313.pyc,, +transformers/models/swin/__pycache__/configuration_swin.cpython-313.pyc,, +transformers/models/swin/__pycache__/modeling_swin.cpython-313.pyc,, +transformers/models/swin/__pycache__/modeling_tf_swin.cpython-313.pyc,, +transformers/models/swin/configuration_swin.py,sha256=hcksE44MGT9_rWYNXvhyl94jqU00rQY3XXTDPzTlvmo,7958 +transformers/models/swin/modeling_swin.py,sha256=iwOA2YKG5e9jnxJUP_BdBObjRslIvDpCi7XxOetVnFg,54742 +transformers/models/swin/modeling_tf_swin.py,sha256=1ql5W5rY6oxRDd24dj-RGsWy-onKGQbeN1JZ3KaKdj4,70829 +transformers/models/swin2sr/__init__.py,sha256=PLCBXwTQF37hLur2ROcYXUiNropQ6u5Ig_HgK29MOu8,1088 +transformers/models/swin2sr/__pycache__/__init__.cpython-313.pyc,, +transformers/models/swin2sr/__pycache__/configuration_swin2sr.cpython-313.pyc,, +transformers/models/swin2sr/__pycache__/image_processing_swin2sr.cpython-313.pyc,, +transformers/models/swin2sr/__pycache__/image_processing_swin2sr_fast.cpython-313.pyc,, +transformers/models/swin2sr/__pycache__/modeling_swin2sr.cpython-313.pyc,, 
+transformers/models/swin2sr/configuration_swin2sr.py,sha256=6ZRVIyo6z1oQvPm13QvkrWcKpf1qjMf0QqwmdHMdvto,6841 +transformers/models/swin2sr/image_processing_swin2sr.py,sha256=h3J27aTqsI7mmVpttQOFYQt0pyYHk68MQhOZyFs53fc,9838 +transformers/models/swin2sr/image_processing_swin2sr_fast.py,sha256=o8qCErIx9f7RXomvEPCH9yUYqIEbUpprFaBCUdcDfCY,4699 +transformers/models/swin2sr/modeling_swin2sr.py,sha256=JCTeuihUJgaE-VddIkKaS5uKqFJ2PwDQf_ILEkt27xU,46884 +transformers/models/swinv2/__init__.py,sha256=njM902tlEQ82mYRN9ZTMOiXpJn1NHnxKbm_LCvn2I-M,993 +transformers/models/swinv2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/swinv2/__pycache__/configuration_swinv2.cpython-313.pyc,, +transformers/models/swinv2/__pycache__/modeling_swinv2.cpython-313.pyc,, +transformers/models/swinv2/configuration_swinv2.py,sha256=m2yjnprZXcqtF4dlw2JAv8MWjBBm6khztGzCUAp4rHw,7547 +transformers/models/swinv2/modeling_swinv2.py,sha256=FynHY4dYJa4H1D-M4wQ37rZkYuPe0ITfSFEhSZjb0fE,58956 +transformers/models/switch_transformers/__init__.py,sha256=Iw38A9kfIT5mJ0G00YE-TVN-M_b1DBHYQqb0pEyTZMY,1019 +transformers/models/switch_transformers/__pycache__/__init__.cpython-313.pyc,, +transformers/models/switch_transformers/__pycache__/configuration_switch_transformers.cpython-313.pyc,, +transformers/models/switch_transformers/__pycache__/modeling_switch_transformers.cpython-313.pyc,, +transformers/models/switch_transformers/configuration_switch_transformers.py,sha256=hqXjdBHj-oqNPBPzwH-e5-dKYDPw2lfWgc3oDegHHVE,9054 +transformers/models/switch_transformers/modeling_switch_transformers.py,sha256=WstSfVCgfkvOOl3sTbTwxHbLSgHo6wUhnyZgcDShKoM,84654 +transformers/models/t5/__init__.py,sha256=hCQO8nkKAJqFgMOwC7nxhyDYOUA9fcDT0pDb7DAHt5Y,1130 +transformers/models/t5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/t5/__pycache__/configuration_t5.cpython-313.pyc,, +transformers/models/t5/__pycache__/modeling_flax_t5.cpython-313.pyc,, +transformers/models/t5/__pycache__/modeling_t5.cpython-313.pyc,, +transformers/models/t5/__pycache__/modeling_tf_t5.cpython-313.pyc,, +transformers/models/t5/__pycache__/tokenization_t5.cpython-313.pyc,, +transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-313.pyc,, +transformers/models/t5/configuration_t5.py,sha256=1fXcdM1_SwAjVwAo2pyiWoxXuoZ6meJaqI6btUPH-bU,7381 +transformers/models/t5/modeling_flax_t5.py,sha256=l-uYt9Ze0OemY-eF2BVoCmYomkQeJieRMuozgr7pqk8,74353 +transformers/models/t5/modeling_t5.py,sha256=7hoEP1wDxoSiu7AhPqOz-L4nKgOriEvcpXwx5zFee34,109941 +transformers/models/t5/modeling_tf_t5.py,sha256=lW4_eVKZY317xGPnXt1L-q0fAp44wSXiRb85vOJEjY8,76956 +transformers/models/t5/tokenization_t5.py,sha256=BZJ_dt1beGBH1qWSpa99UHv-hJ3C_jo1zFHKc07yBhA,19952 +transformers/models/t5/tokenization_t5_fast.py,sha256=E1E_dH9RXXh7ei1c1u9Q_518o-n5x0mrhMJlHvlqLb8,10048 +transformers/models/t5gemma/__init__.py,sha256=2QjFw4aK-Ui2JuImfxWN8oeMkMwokEzurG8wPvKv98Y,1005 +transformers/models/t5gemma/__pycache__/__init__.cpython-313.pyc,, +transformers/models/t5gemma/__pycache__/configuration_t5gemma.cpython-313.pyc,, +transformers/models/t5gemma/__pycache__/modeling_t5gemma.cpython-313.pyc,, +transformers/models/t5gemma/__pycache__/modular_t5gemma.cpython-313.pyc,, +transformers/models/t5gemma/configuration_t5gemma.py,sha256=ZJBaR9MKuXG8bs6iKd1iBZTt36OYzgB_pDr4mLcybvo,16241 +transformers/models/t5gemma/modeling_t5gemma.py,sha256=wE7sb0oufF7LlKjT4W-_6rDqqjvH24RfFiuopb_tQ4g,60724 +transformers/models/t5gemma/modular_t5gemma.py,sha256=UoPUgci0tziHx0TmPNj9xn6b1cEqG0MsqZA_5iNqtL8,52350 
+transformers/models/table_transformer/__init__.py,sha256=VT-KM0_6LZ6fdOAglbfA8zEhCQuYa6He10Div7WEcD8,1015 +transformers/models/table_transformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/table_transformer/__pycache__/configuration_table_transformer.cpython-313.pyc,, +transformers/models/table_transformer/__pycache__/modeling_table_transformer.cpython-313.pyc,, +transformers/models/table_transformer/configuration_table_transformer.py,sha256=sXV9fFcqNqz9K7zYPCFZyDXfjr42GhotJ7th5UCdH54,13613 +transformers/models/table_transformer/modeling_table_transformer.py,sha256=RYS0x53pWkPkh8OiSfhBLTe2ioplGetkvQu-RG8Vj1A,61338 +transformers/models/tapas/__init__.py,sha256=DQTmog2nYukVsXxARy8v35SitI0Iv4ZLCGl7zUlLDuI,1066 +transformers/models/tapas/__pycache__/__init__.cpython-313.pyc,, +transformers/models/tapas/__pycache__/configuration_tapas.cpython-313.pyc,, +transformers/models/tapas/__pycache__/modeling_tapas.cpython-313.pyc,, +transformers/models/tapas/__pycache__/modeling_tf_tapas.cpython-313.pyc,, +transformers/models/tapas/__pycache__/tokenization_tapas.cpython-313.pyc,, +transformers/models/tapas/configuration_tapas.py,sha256=i1_a_AArLjS8SkWu0Du8TC7JZBfvMUSku2QteSdfnC4,12293 +transformers/models/tapas/modeling_tapas.py,sha256=6-CTY0FbAgSAl5-_r2AgvFNw_3KeGRa8H7uTjYC6Kwk,108838 +transformers/models/tapas/modeling_tf_tapas.py,sha256=HpG3CLwp21ZKZ39Kew6FakiaWuCuv0ZFXmFdMs2WFlw,112267 +transformers/models/tapas/tokenization_tapas.py,sha256=tN5PcvOE_GT9NOYggB4GYI0kRl3mj_gyqUHDQ0bjGXE,118445 +transformers/models/textnet/__init__.py,sha256=nqXafUk5z5F_61jrGF9gK_sI66uj7kq5sUYrit9bsno,1088 +transformers/models/textnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/textnet/__pycache__/configuration_textnet.cpython-313.pyc,, +transformers/models/textnet/__pycache__/image_processing_textnet.cpython-313.pyc,, +transformers/models/textnet/__pycache__/image_processing_textnet_fast.cpython-313.pyc,, +transformers/models/textnet/__pycache__/modeling_textnet.cpython-313.pyc,, +transformers/models/textnet/configuration_textnet.py,sha256=ZGtG42UxM2RbWr7pSN1IUIBo74aK8Vq79Gg2-vfFWp4,6212 +transformers/models/textnet/image_processing_textnet.py,sha256=H_Lp_-3oe-_P8dzHEa_BkPn7Gp8mrbiic87Ld4XYkkw,17731 +transformers/models/textnet/image_processing_textnet_fast.py,sha256=OU_nnaC0e9tbMc3r5NYDc_k26PpcR1SUfS7h63Yn4mc,5736 +transformers/models/textnet/modeling_textnet.py,sha256=oy3NC67XYjrbPr8iujuO6fLYdrn6REIZncOkYP1fvEY,15517 +transformers/models/time_series_transformer/__init__.py,sha256=3A_3Wog-6NDwCoBIMtkzJv9slc_wXpzDzsOo-xBQ8hE,1027 +transformers/models/time_series_transformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/time_series_transformer/__pycache__/configuration_time_series_transformer.cpython-313.pyc,, +transformers/models/time_series_transformer/__pycache__/modeling_time_series_transformer.cpython-313.pyc,, +transformers/models/time_series_transformer/configuration_time_series_transformer.py,sha256=jNs-oZ17yVDy4g-shNyOLoA9pupIt9ZlBbX5BXRLyYo,11695 +transformers/models/time_series_transformer/modeling_time_series_transformer.py,sha256=sqb8fls8_cJ-h2dkfeWrkqCPGgCiCs8FeV-rRn-Npi0,95480 +transformers/models/timesfm/__init__.py,sha256=gcfLgRAbwZThFP98fst9wsoTMB0fkR28tzWYoQIs5qU,995 +transformers/models/timesfm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/timesfm/__pycache__/configuration_timesfm.cpython-313.pyc,, +transformers/models/timesfm/__pycache__/modeling_timesfm.cpython-313.pyc,, 
+transformers/models/timesfm/__pycache__/modular_timesfm.cpython-313.pyc,, +transformers/models/timesfm/configuration_timesfm.py,sha256=OBKxwrNeeak-YgKtKL4zTgs705E9to4FCbD7wX0J1Gs,5715 +transformers/models/timesfm/modeling_timesfm.py,sha256=oW_6yao7FOzfvlXxGVAsdK2AZlfX7H1f0R77yUDR-oQ,34502 +transformers/models/timesfm/modular_timesfm.py,sha256=RE1ypm8pygCjjZQ2lUfEfwqiY3v8wYdWHJrEsFIkdeo,32164 +transformers/models/timesformer/__init__.py,sha256=4ODuyNRrYkbgpSbMHJX8XmpJdekHlu__zWey-plUSgI,1003 +transformers/models/timesformer/__pycache__/__init__.cpython-313.pyc,, +transformers/models/timesformer/__pycache__/configuration_timesformer.cpython-313.pyc,, +transformers/models/timesformer/__pycache__/modeling_timesformer.cpython-313.pyc,, +transformers/models/timesformer/configuration_timesformer.py,sha256=GilCKil_40B_hqjh0-02CWrBupbwEfHhOZ3b5bUpTPI,5568 +transformers/models/timesformer/modeling_timesformer.py,sha256=C05CUfZFsKM4ckKg8PSJTBUEk8KDo7fFUgqa1HjngGM,32808 +transformers/models/timm_backbone/__init__.py,sha256=s0GlTaJ43Yt9ZdzG9-qjJNlp0Ol4vjN-14S6N7gXLsA,1007 +transformers/models/timm_backbone/__pycache__/__init__.cpython-313.pyc,, +transformers/models/timm_backbone/__pycache__/configuration_timm_backbone.cpython-313.pyc,, +transformers/models/timm_backbone/__pycache__/modeling_timm_backbone.cpython-313.pyc,, +transformers/models/timm_backbone/configuration_timm_backbone.py,sha256=CMRsZX3ZQiI1bzBrza3Eqgjy8XEid8dPfJZVuhtLTn8,3186 +transformers/models/timm_backbone/modeling_timm_backbone.py,sha256=8k2tlfAtgX2FguLvxtXc_15jCjbnq4lXKq_BalsgxT8,6635 +transformers/models/timm_wrapper/__init__.py,sha256=U19BCtZcAQdmBto5bqG6u69W5rnUBtIyP11EcRh_0Tk,1054 +transformers/models/timm_wrapper/__pycache__/__init__.cpython-313.pyc,, +transformers/models/timm_wrapper/__pycache__/configuration_timm_wrapper.cpython-313.pyc,, +transformers/models/timm_wrapper/__pycache__/image_processing_timm_wrapper.cpython-313.pyc,, +transformers/models/timm_wrapper/__pycache__/modeling_timm_wrapper.cpython-313.pyc,, +transformers/models/timm_wrapper/configuration_timm_wrapper.py,sha256=uvp6s_QCjKBXSO461YIEJugvQ6Wbo9BkZ-peFc8nt7s,5509 +transformers/models/timm_wrapper/image_processing_timm_wrapper.py,sha256=hV9q0IaHBS-hl0FwNUwam3uEXWf4sC7YrQTGjMLMR8k,5348 +transformers/models/timm_wrapper/modeling_timm_wrapper.py,sha256=7pxeqAIhm73Zj1bgt64hO7YwGXxfXeBOwX14PnKqcBQ,16677 +transformers/models/trocr/__init__.py,sha256=Hllbq_42XbGRZyXsGOzYHcb33MOA5_yfijMRKEXJ4n4,1027 +transformers/models/trocr/__pycache__/__init__.cpython-313.pyc,, +transformers/models/trocr/__pycache__/configuration_trocr.cpython-313.pyc,, +transformers/models/trocr/__pycache__/modeling_trocr.cpython-313.pyc,, +transformers/models/trocr/__pycache__/processing_trocr.cpython-313.pyc,, +transformers/models/trocr/configuration_trocr.py,sha256=mm8gO1FagOM7OpQ9S7TZ9UrNc7or081ymZz-q3uss3s,6558 +transformers/models/trocr/modeling_trocr.py,sha256=0fAS9zHFFwwVFHbGbKpiqalER5MSNRaHLavOGOXWOsA,39336 +transformers/models/trocr/processing_trocr.py,sha256=ts4bW7BoYIqffiXK4Iin6W1i-5h8AsHbIGBx6SaLBWE,5738 +transformers/models/tvp/__init__.py,sha256=zY4KwnUt4Q8f_afJGLIxUIfZf8RIyOOCbv9tnAJk1ic,1106 +transformers/models/tvp/__pycache__/__init__.cpython-313.pyc,, +transformers/models/tvp/__pycache__/configuration_tvp.cpython-313.pyc,, +transformers/models/tvp/__pycache__/image_processing_tvp.cpython-313.pyc,, +transformers/models/tvp/__pycache__/image_processing_tvp_fast.cpython-313.pyc,, +transformers/models/tvp/__pycache__/modeling_tvp.cpython-313.pyc,, 
+transformers/models/tvp/__pycache__/processing_tvp.cpython-313.pyc,, +transformers/models/tvp/configuration_tvp.py,sha256=DpgrPqYGfjA6OSNkh5UsdkLw8RWFF6qh90Zrz6U9pn4,10147 +transformers/models/tvp/image_processing_tvp.py,sha256=LRQ9EvVO1MemVGbQqtzLC3zUksA__g-xXZXUHLV_Fh0,22750 +transformers/models/tvp/image_processing_tvp_fast.py,sha256=XIrD2hGP59xpTfkCAqCJnCUrbWkDw_kJTkFwMOKuWkM,8477 +transformers/models/tvp/modeling_tvp.py,sha256=LxF_XsdNsgP-YmJaa_iUCww7RLbN4gwBoX1_dA0VvHY,40032 +transformers/models/tvp/processing_tvp.py,sha256=e1h71Tq5NDHBeea7uS3Kc61m-PisGoYvgss4Q-Tgjdo,2648 +transformers/models/udop/__init__.py,sha256=CqFpHruzC__VtxEcVz31QxxMpBI1mjO77-Lj0RqW4Eo,1103 +transformers/models/udop/__pycache__/__init__.cpython-313.pyc,, +transformers/models/udop/__pycache__/configuration_udop.cpython-313.pyc,, +transformers/models/udop/__pycache__/modeling_udop.cpython-313.pyc,, +transformers/models/udop/__pycache__/processing_udop.cpython-313.pyc,, +transformers/models/udop/__pycache__/tokenization_udop.cpython-313.pyc,, +transformers/models/udop/__pycache__/tokenization_udop_fast.cpython-313.pyc,, +transformers/models/udop/configuration_udop.py,sha256=xzaHEk_1LtY4AqHr10qL2Vt7Yi-CRgCO98Ni_OvRPgg,7675 +transformers/models/udop/modeling_udop.py,sha256=SkHCGohwJ1dpt7-JIeRSzncKsouh3__yWtbYnc-Ktnw,92185 +transformers/models/udop/processing_udop.py,sha256=Huz5GGRLuRhY6S9P9yxzowgz8JC3LozZkoyZUBfRjIY,8642 +transformers/models/udop/tokenization_udop.py,sha256=8wBBqyD99Y_tcP8q_LHZiIITj26kKdMRtLAeaIH91EU,71827 +transformers/models/udop/tokenization_udop_fast.py,sha256=cvfqL2DiAyd2_d5BL14U6XQScBvM8JoGizTM3tLSik0,49663 +transformers/models/umt5/__init__.py,sha256=FKt6Ap3AvOCIKoeOM-5qY84lNEML9IujaDaYROINJMs,989 +transformers/models/umt5/__pycache__/__init__.cpython-313.pyc,, +transformers/models/umt5/__pycache__/configuration_umt5.cpython-313.pyc,, +transformers/models/umt5/__pycache__/modeling_umt5.cpython-313.pyc,, +transformers/models/umt5/configuration_umt5.py,sha256=W60fZhT2upRLbNTauRSs1-K0HSB5aCV4m79TFSXO1VI,7749 +transformers/models/umt5/modeling_umt5.py,sha256=m86drqG6CRNml3lD60ZRP0KbCehBReCvzfgfYepB2Fs,90935 +transformers/models/unispeech/__init__.py,sha256=AXJMExDoYYI71OKNXhAt7lyqcFIvcLHEQ1Fsm171m5w,999 +transformers/models/unispeech/__pycache__/__init__.cpython-313.pyc,, +transformers/models/unispeech/__pycache__/configuration_unispeech.cpython-313.pyc,, +transformers/models/unispeech/__pycache__/modeling_unispeech.cpython-313.pyc,, +transformers/models/unispeech/__pycache__/modular_unispeech.cpython-313.pyc,, +transformers/models/unispeech/configuration_unispeech.py,sha256=THcTvZNOVsDyoVkiDZ5kKGu5hmNu0qM2lal-P-WPDos,17510 +transformers/models/unispeech/modeling_unispeech.py,sha256=8pZQ3HXzcXXzgp8HpjkzniWKKfERCmGHDtQpjIi7hOU,64373 +transformers/models/unispeech/modular_unispeech.py,sha256=A7tDao9K2QPRdy7WpLGKJLWmYFxpp9f4eqAzkCFvtdE,18207 +transformers/models/unispeech_sat/__init__.py,sha256=P9lCzMg01s4Gj_Pb8t1l36MRAeoOcxUa4d7dbQSe9N4,1007 +transformers/models/unispeech_sat/__pycache__/__init__.cpython-313.pyc,, +transformers/models/unispeech_sat/__pycache__/configuration_unispeech_sat.cpython-313.pyc,, +transformers/models/unispeech_sat/__pycache__/modeling_unispeech_sat.cpython-313.pyc,, +transformers/models/unispeech_sat/__pycache__/modular_unispeech_sat.cpython-313.pyc,, +transformers/models/unispeech_sat/configuration_unispeech_sat.py,sha256=3CduPVTYRqVQWaZbD_oA3wsCl_7v95Fn4RuWpPi6VhQ,18855 
+transformers/models/unispeech_sat/modeling_unispeech_sat.py,sha256=Ri6oR_eW82UilW8mZ0UMIFSx6OPK2KV-Z9izxQaA3I0,78446 +transformers/models/unispeech_sat/modular_unispeech_sat.py,sha256=UsFpwELhczFJTXGNoHClgGFzP-ZFWBFVQ5HQR6S5KQY,18606 +transformers/models/univnet/__init__.py,sha256=hfHyxyKGEfd58p1fUSA3IxK2q6JkVatkGceVaoKuODk,1041 +transformers/models/univnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/univnet/__pycache__/configuration_univnet.cpython-313.pyc,, +transformers/models/univnet/__pycache__/feature_extraction_univnet.cpython-313.pyc,, +transformers/models/univnet/__pycache__/modeling_univnet.cpython-313.pyc,, +transformers/models/univnet/configuration_univnet.py,sha256=dwE48PdXxA4_3tbux06b7HAsdUK9c5-capcOdDeAr9c,6758 +transformers/models/univnet/feature_extraction_univnet.py,sha256=9iNfhNCBNRWaY7odFlzXzMLzauhSzgFGdr20sQ4xPWw,22880 +transformers/models/univnet/modeling_univnet.py,sha256=cngw_Vo6uRk_-mG6Fv95xaXDGpxVVEarC7ewQDr2Wqw,25749 +transformers/models/upernet/__init__.py,sha256=Wq3u7yXJul5PLmjalxKgx451sa_WuSXbEM45bZsRv3E,995 +transformers/models/upernet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/upernet/__pycache__/configuration_upernet.cpython-313.pyc,, +transformers/models/upernet/__pycache__/modeling_upernet.cpython-313.pyc,, +transformers/models/upernet/configuration_upernet.py,sha256=ih_8sDO-OzHJOCqUDhtiBOtRanCrLfhx3c6OBZryILI,6859 +transformers/models/upernet/modeling_upernet.py,sha256=vWF4TKPs6t8AxHBVOGsw2BaHRyXnithQ3RFs-iNRHUM,14577 +transformers/models/vaultgemma/__init__.py,sha256=NxMaPmECIwYDSIgI9BuyVwsLmZEwagfSCY2Wy0YAiOQ,1017 +transformers/models/vaultgemma/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vaultgemma/__pycache__/configuration_vaultgemma.cpython-313.pyc,, +transformers/models/vaultgemma/__pycache__/modeling_vaultgemma.cpython-313.pyc,, +transformers/models/vaultgemma/__pycache__/modular_vaultgemma.cpython-313.pyc,, +transformers/models/vaultgemma/configuration_vaultgemma.py,sha256=8PN4wzNi91UmX25nO7IiCXBKqHhGOTa7RBnOZ_VNf8M,9672 +transformers/models/vaultgemma/modeling_vaultgemma.py,sha256=MFSclu_qyX0foCw4LZIvMrnsIdz7y48-C6u1OKBG2kc,24857 +transformers/models/vaultgemma/modular_vaultgemma.py,sha256=_VKyFOSmR1sASCQBBxotRccrOUBqqaXAq0q1svDfj5Q,2802 +transformers/models/video_llava/__init__.py,sha256=bsLGp1WBBO_AvNVRxzOn5k7OYQIbX9SqFhESd24FImc,1093 +transformers/models/video_llava/__pycache__/__init__.cpython-313.pyc,, +transformers/models/video_llava/__pycache__/configuration_video_llava.cpython-313.pyc,, +transformers/models/video_llava/__pycache__/image_processing_video_llava.cpython-313.pyc,, +transformers/models/video_llava/__pycache__/modeling_video_llava.cpython-313.pyc,, +transformers/models/video_llava/__pycache__/processing_video_llava.cpython-313.pyc,, +transformers/models/video_llava/__pycache__/video_processing_video_llava.cpython-313.pyc,, +transformers/models/video_llava/configuration_video_llava.py,sha256=zetmD2cvfHzFEiU8PJn1qWWVPwMgT08YMR3q-DTPukk,6448 +transformers/models/video_llava/image_processing_video_llava.py,sha256=1pqD3NxiK0Y5Eb5JcuExB6t_3ca5wDGNh6TQ-R1V0h8,19160 +transformers/models/video_llava/modeling_video_llava.py,sha256=XJ0nmB_o6gd0-J5U4Cxb1SiaHYxlOhvixRgzeDRt9ak,33549 +transformers/models/video_llava/processing_video_llava.py,sha256=1iOpeBgUC0Lr5Yq8d9g0ytne_13Gg2CR2CWu9CelfxE,10786 +transformers/models/video_llava/video_processing_video_llava.py,sha256=fE7ZIouajWIxNfpJAjpTUqkRQwICpPwzkaVBUutcn2Q,1678 
+transformers/models/videomae/__init__.py,sha256=IYw3qXj1-PDmBAp---YaZyqdBsIjdMZQI37xT_-9SgY,1089 +transformers/models/videomae/__pycache__/__init__.cpython-313.pyc,, +transformers/models/videomae/__pycache__/configuration_videomae.cpython-313.pyc,, +transformers/models/videomae/__pycache__/feature_extraction_videomae.cpython-313.pyc,, +transformers/models/videomae/__pycache__/image_processing_videomae.cpython-313.pyc,, +transformers/models/videomae/__pycache__/modeling_videomae.cpython-313.pyc,, +transformers/models/videomae/configuration_videomae.py,sha256=O0BwqYZnc9Q5Kpemmel6rOxeDBSj7KKCxgpHfMVCVGE,6600 +transformers/models/videomae/feature_extraction_videomae.py,sha256=YfjgYL2im5-5OtnL_U9Z72Fxm58jNAIQWkUlszLJEtY,1316 +transformers/models/videomae/image_processing_videomae.py,sha256=JEvCBnW-u36BnclEMi6PC9pyVf_MabxE3y8nHR3-E-E,16791 +transformers/models/videomae/modeling_videomae.py,sha256=6FL19ketIRuIcrBzGXlSKxsGYz4oJkTlaoaU5kjtW48,38363 +transformers/models/vilt/__init__.py,sha256=efaZSTGsk3QhZmBrc6F29q55LkC_1Vb8fNC0MY4881Q,1154 +transformers/models/vilt/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vilt/__pycache__/configuration_vilt.cpython-313.pyc,, +transformers/models/vilt/__pycache__/feature_extraction_vilt.cpython-313.pyc,, +transformers/models/vilt/__pycache__/image_processing_vilt.cpython-313.pyc,, +transformers/models/vilt/__pycache__/image_processing_vilt_fast.cpython-313.pyc,, +transformers/models/vilt/__pycache__/modeling_vilt.cpython-313.pyc,, +transformers/models/vilt/__pycache__/processing_vilt.cpython-313.pyc,, +transformers/models/vilt/configuration_vilt.py,sha256=B7lnWQV7QC5CeliGPQF5TP5Ci-s35bv7_LX4UvOVNUs,6817 +transformers/models/vilt/feature_extraction_vilt.py,sha256=OYz67RYXTxX9oQpJ9b-lSzCduexmgugUpkiPHSfcs9s,1284 +transformers/models/vilt/image_processing_vilt.py,sha256=MWYvUw6fi9XIJOI2FJ379a8wFAto1hO7HTa-L3Y0hgc,23304 +transformers/models/vilt/image_processing_vilt_fast.py,sha256=w2f2VPmSVCCcLFiPMKIrjraVnYN7T4pdxCeFFxVpabI,9457 +transformers/models/vilt/modeling_vilt.py,sha256=jG91LCGy1FEUnZnh42EBSI6Y4oHKkhLV_vFzE8SWnPg,57560 +transformers/models/vilt/processing_vilt.py,sha256=N8C1XiqnMb7a5SbMDiDlIqdrm31K-XjAEQn1jCGc0Vk,3371 +transformers/models/vipllava/__init__.py,sha256=HJ5mZUNdt_bmaC9l-GycD7mVT2r1oN15prmnlBtz6oA,997 +transformers/models/vipllava/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vipllava/__pycache__/configuration_vipllava.cpython-313.pyc,, +transformers/models/vipllava/__pycache__/modeling_vipllava.cpython-313.pyc,, +transformers/models/vipllava/__pycache__/modular_vipllava.cpython-313.pyc,, +transformers/models/vipllava/configuration_vipllava.py,sha256=BUvvdV2CgiEp5h-pWbuEUi7t8bnkl6haqljNtf3en6c,5049 +transformers/models/vipllava/modeling_vipllava.py,sha256=e54zVMrTPBuour9sgr5DfwNriKkfDGtWVAP1pZ2Uj9A,20614 +transformers/models/vipllava/modular_vipllava.py,sha256=1K_TeonOFDO_UZR_6MBHOJyGW5V8Vx6pNWg3u1pqv8Y,12339 +transformers/models/vision_encoder_decoder/__init__.py,sha256=xK5xKVeIOZSN1d9Y2nDa3NYkLdGidbwgQ6Es8JhzKzA,1135 +transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/configuration_vision_encoder_decoder.cpython-313.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_flax_vision_encoder_decoder.cpython-313.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_tf_vision_encoder_decoder.cpython-313.pyc,, 
+transformers/models/vision_encoder_decoder/__pycache__/modeling_vision_encoder_decoder.cpython-313.pyc,, +transformers/models/vision_encoder_decoder/configuration_vision_encoder_decoder.py,sha256=6uLyTC16qnr7xboS9yTb1C6OvVWu2snFata6p85Crcs,8475 +transformers/models/vision_encoder_decoder/modeling_flax_vision_encoder_decoder.py,sha256=K_vwvC2W7Sz_j4gh3DbnplwdIH3zRNA5Tlk9v-qbxBw,41615 +transformers/models/vision_encoder_decoder/modeling_tf_vision_encoder_decoder.py,sha256=t7zHO-hBGMm-wlMQQN7x-nCBqUclSXP93U--Iun8iG8,36115 +transformers/models/vision_encoder_decoder/modeling_vision_encoder_decoder.py,sha256=L3UvMzl75fC-koHjos9bmnmq7i1_Qk7cl0IF19Xc4so,29373 +transformers/models/vision_text_dual_encoder/__init__.py,sha256=LRXs5oXk4_8AaHuIVaj1IgBO4X1vwP-ehQC1T1xEiAI,1198 +transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/configuration_vision_text_dual_encoder.cpython-313.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_flax_vision_text_dual_encoder.cpython-313.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_tf_vision_text_dual_encoder.cpython-313.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_vision_text_dual_encoder.cpython-313.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/processing_vision_text_dual_encoder.cpython-313.pyc,, +transformers/models/vision_text_dual_encoder/configuration_vision_text_dual_encoder.py,sha256=Zqb3nGZWG-J3a2FPUY4ocbDYWiLVeZOiFua-MXTDUfQ,5023 +transformers/models/vision_text_dual_encoder/modeling_flax_vision_text_dual_encoder.py,sha256=ejVh6jvNJhfxXc70KAyy0UZxj_xWpcrly7sbUclBi54,26399 +transformers/models/vision_text_dual_encoder/modeling_tf_vision_text_dual_encoder.py,sha256=NnCOaAYtjQ4yUv90Am7cHnxKU9VgDDnXGQqnJSsgEs4,28626 +transformers/models/vision_text_dual_encoder/modeling_vision_text_dual_encoder.py,sha256=1kiv0PqgdNXflKJDA5fxpfbYRQu1ewfCS1OG8bFFqhA,17987 +transformers/models/vision_text_dual_encoder/processing_vision_text_dual_encoder.py,sha256=T7I9V7xJoBWFtXYu9QgFE5NgImm2gfuu2ymKbUozGXA,2854 +transformers/models/visual_bert/__init__.py,sha256=zZFHfkE7OUMZUwYvB7v4ZIBXVUW9Mboqoa1QdTQURWM,1003 +transformers/models/visual_bert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/visual_bert/__pycache__/configuration_visual_bert.cpython-313.pyc,, +transformers/models/visual_bert/__pycache__/modeling_visual_bert.cpython-313.pyc,, +transformers/models/visual_bert/configuration_visual_bert.py,sha256=4U17YnlSjbOpzsAPdGH_EfvBjv7jppbHWlmLBrchGM4,6767 +transformers/models/visual_bert/modeling_visual_bert.py,sha256=5CvusWewEyuj_U-RXAzI_76POb8eQ5VP7-XMfdXZ-vk,70212 +transformers/models/vit/__init__.py,sha256=uTQRjeWgJLHyXfc7yVOEyv7wnr42Jhy-8p9k5UUbxAM,1186 +transformers/models/vit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vit/__pycache__/configuration_vit.cpython-313.pyc,, +transformers/models/vit/__pycache__/feature_extraction_vit.cpython-313.pyc,, +transformers/models/vit/__pycache__/image_processing_vit.cpython-313.pyc,, +transformers/models/vit/__pycache__/image_processing_vit_fast.cpython-313.pyc,, +transformers/models/vit/__pycache__/modeling_flax_vit.cpython-313.pyc,, +transformers/models/vit/__pycache__/modeling_tf_vit.cpython-313.pyc,, +transformers/models/vit/__pycache__/modeling_vit.cpython-313.pyc,, +transformers/models/vit/configuration_vit.py,sha256=qzjqndsRc6Pyd8YiTFvAbe-OIIJyRSPOdUFKVSJB2Fg,6290 
+transformers/models/vit/feature_extraction_vit.py,sha256=v5PPSon24ldH0wC-42BQTxGakc-ow2aUh-Egq5D9hJw,1276 +transformers/models/vit/image_processing_vit.py,sha256=8U9XOAgxjOwLt9HeJtwhpqGNvDA4hM2ZiIQMjhKrHg4,14449 +transformers/models/vit/image_processing_vit_fast.py,sha256=yrkXCSNPpRXfBiQhsgLao-dFOALdBrWa4dDOwZvGiwQ,1237 +transformers/models/vit/modeling_flax_vit.py,sha256=95SBab3CAIoD3bwgWfN6Y7v44fR6EHNJVDuPqi-FOX8,25503 +transformers/models/vit/modeling_tf_vit.py,sha256=r__S8GF7AS7xbOpnVp_JOdE4ByH1H1WBnv1CZdGLTmE,37321 +transformers/models/vit/modeling_vit.py,sha256=Bp4JWMEOgGVRouHSQhzSbmv_IffGWuufDzwNveGojKU,28910 +transformers/models/vit_mae/__init__.py,sha256=C8NcxWwzXlNMeMOA9DNHfDYvRF9biIuUduuwhoaTTD8,1034 +transformers/models/vit_mae/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vit_mae/__pycache__/configuration_vit_mae.cpython-313.pyc,, +transformers/models/vit_mae/__pycache__/modeling_tf_vit_mae.cpython-313.pyc,, +transformers/models/vit_mae/__pycache__/modeling_vit_mae.cpython-313.pyc,, +transformers/models/vit_mae/configuration_vit_mae.py,sha256=3nnWDAbp6WLfOHLO3taJUNEuGRlk3oAa0qaLEEJgjHQ,6372 +transformers/models/vit_mae/modeling_tf_vit_mae.py,sha256=GKXpysqOcon8KW-3KRQoG9mvLBb5b8mIk2Ix6VQyYhE,58009 +transformers/models/vit_mae/modeling_vit_mae.py,sha256=PSW194r_6IgNb6uFUf4ojqGSqnbc6hhh5DUiTBc0ZHk,40948 +transformers/models/vit_msn/__init__.py,sha256=Y1g56VRSNr-PxS-g4Cp2IlRR5M9CiaFGlhAQXwszGHo,995 +transformers/models/vit_msn/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vit_msn/__pycache__/configuration_vit_msn.cpython-313.pyc,, +transformers/models/vit_msn/__pycache__/modeling_vit_msn.cpython-313.pyc,, +transformers/models/vit_msn/configuration_vit_msn.py,sha256=HeU0UloranISU9zLiPsK0CyooMacqogTNmwE4xp2N-o,4864 +transformers/models/vit_msn/modeling_vit_msn.py,sha256=qz3Z3PJOzoEsgZvR8hHkW31yX9iMLE8vsnj5jWhCK_w,24236 +transformers/models/vitdet/__init__.py,sha256=13LNGZwvKK3tBrQWVs43rQbxbgqvxLfnM0uMqomHqhM,993 +transformers/models/vitdet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vitdet/__pycache__/configuration_vitdet.cpython-313.pyc,, +transformers/models/vitdet/__pycache__/modeling_vitdet.cpython-313.pyc,, +transformers/models/vitdet/configuration_vitdet.py,sha256=CM18kVFmgjDEp7leQPG0L60VKNmBebmxYvEGZN4Kvlg,7541 +transformers/models/vitdet/modeling_vitdet.py,sha256=CqmO4bK5mRE-wNA6DhoYv6-ms0b4Xqhqo47-W7VS-XY,32365 +transformers/models/vitmatte/__init__.py,sha256=al6dWrth9LhRLjmVZrxSi0SRcMMUH_UNpMmR5nwflSc,1092 +transformers/models/vitmatte/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vitmatte/__pycache__/configuration_vitmatte.cpython-313.pyc,, +transformers/models/vitmatte/__pycache__/image_processing_vitmatte.cpython-313.pyc,, +transformers/models/vitmatte/__pycache__/image_processing_vitmatte_fast.cpython-313.pyc,, +transformers/models/vitmatte/__pycache__/modeling_vitmatte.cpython-313.pyc,, +transformers/models/vitmatte/configuration_vitmatte.py,sha256=QRQbE4An9rWLaj4L1UgDo9JipXKVPyPPahJiMKXyQBQ,6498 +transformers/models/vitmatte/image_processing_vitmatte.py,sha256=5SxTOzvoLuVuB0qZlpJQM9ueeBm9mFTYXXsFDiq_hTo,14172 +transformers/models/vitmatte/image_processing_vitmatte_fast.py,sha256=hTo238U2zVVh-2IWVP5cYsJK6vXH1LaNyhHQmvWsRD4,6828 +transformers/models/vitmatte/modeling_vitmatte.py,sha256=Xw_A4MvjsM34PDy5XxkZ26mbYm7WP8wiYhbVc4O6k0Y,10618 +transformers/models/vitpose/__init__.py,sha256=VA7aRcVMgFJH46i6HurkXJS0Z38BotU3H3o3e2wgyXU,1039 +transformers/models/vitpose/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/vitpose/__pycache__/configuration_vitpose.cpython-313.pyc,, +transformers/models/vitpose/__pycache__/image_processing_vitpose.cpython-313.pyc,, +transformers/models/vitpose/__pycache__/modeling_vitpose.cpython-313.pyc,, +transformers/models/vitpose/configuration_vitpose.py,sha256=Ij3xe1cFLDOuIJYUTh-SP2UPKSf9pHwXlLwKRDtcqdc,6015 +transformers/models/vitpose/image_processing_vitpose.py,sha256=hsHxTJOi9q3CV76VvGwGTMVtEjeAyDdCpc1vZj1OlMw,29606 +transformers/models/vitpose/modeling_vitpose.py,sha256=AtYwmW5WassxYj1nYjfs71IPoalS6L0WUWLmpOJ_q6k,11683 +transformers/models/vitpose_backbone/__init__.py,sha256=W5IjP47Ykg5KRs8S9ztAbtfQ__n6sbJUZG4UDIGdGmA,577 +transformers/models/vitpose_backbone/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vitpose_backbone/__pycache__/configuration_vitpose_backbone.cpython-313.pyc,, +transformers/models/vitpose_backbone/__pycache__/modeling_vitpose_backbone.cpython-313.pyc,, +transformers/models/vitpose_backbone/configuration_vitpose_backbone.py,sha256=k17SrNK_1I7cE43C3Cu2ZU5V5VnWQA7RsmOSSzXCEME,6651 +transformers/models/vitpose_backbone/modeling_vitpose_backbone.py,sha256=i8KBSXswCAUFuPLjYDVhq0uOy0Y7jXMWILOsK6OP-hU,19298 +transformers/models/vits/__init__.py,sha256=7baZcqGvFlYQxAl721XtMptMZKkzvBOa2ttyOhqhUtk,1026 +transformers/models/vits/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vits/__pycache__/configuration_vits.cpython-313.pyc,, +transformers/models/vits/__pycache__/modeling_vits.cpython-313.pyc,, +transformers/models/vits/__pycache__/tokenization_vits.cpython-313.pyc,, +transformers/models/vits/configuration_vits.py,sha256=OT42q2ihf2Q9r9qm7JJM4gJlOqQSZyVH8Jk3Qsbcji0,13892 +transformers/models/vits/modeling_vits.py,sha256=E-hWgCI2LT1Y-EkF44ixlPCzzT88cbp7t5ZgYv-_tBM,61936 +transformers/models/vits/tokenization_vits.py,sha256=hMWf72PabgSlH-UJjN4-ddrNQGb8n5e7d5mSXuGTK9U,9369 +transformers/models/vivit/__init__.py,sha256=LT2FipIBdB69s9UY4viyuB5q2e0v3bCwtQMiOEOj2xg,1033 +transformers/models/vivit/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vivit/__pycache__/configuration_vivit.cpython-313.pyc,, +transformers/models/vivit/__pycache__/image_processing_vivit.cpython-313.pyc,, +transformers/models/vivit/__pycache__/modeling_vivit.cpython-313.pyc,, +transformers/models/vivit/configuration_vivit.py,sha256=TVsjmzoXac2Xh0zcHS8fy0RmFivbol3WsO7kj-gKZik,5142 +transformers/models/vivit/image_processing_vivit.py,sha256=i4-HurrJ_6igx1IpIRE1WhKgCsupxw5z6o0sCKvVxa0,19265 +transformers/models/vivit/modeling_vivit.py,sha256=x46lNFWgTiYiFALbOg4tAtl0hYPdApCWzo8cMwchTKg,28598 +transformers/models/vjepa2/__init__.py,sha256=uG8tvHYoCxXAMjQuCfsT56YCg0l8_2e6H5Nm7L7Ygm0,1056 +transformers/models/vjepa2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/vjepa2/__pycache__/configuration_vjepa2.cpython-313.pyc,, +transformers/models/vjepa2/__pycache__/modeling_vjepa2.cpython-313.pyc,, +transformers/models/vjepa2/__pycache__/video_processing_vjepa2.cpython-313.pyc,, +transformers/models/vjepa2/configuration_vjepa2.py,sha256=mpPeIpTffMjlfOJ5m0mM_wj16znJAKUabh3S1EGvcoM,7055 +transformers/models/vjepa2/modeling_vjepa2.py,sha256=IE0u06fWlVrLMUg8w7d-VXHRrviU28yuAW7U_QHz8xI,48804 +transformers/models/vjepa2/video_processing_vjepa2.py,sha256=_I9Z3HJE5Bx__XtvlWv1jxBAK3vXvtHA9it15fkJEJI,1928 +transformers/models/voxtral/__init__.py,sha256=hARPigCbvhSlR0iKCDkHcUnsXtoCgXnLFSgZnw5HQus,1053 +transformers/models/voxtral/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/voxtral/__pycache__/configuration_voxtral.cpython-313.pyc,, +transformers/models/voxtral/__pycache__/modeling_voxtral.cpython-313.pyc,, +transformers/models/voxtral/__pycache__/modular_voxtral.cpython-313.pyc,, +transformers/models/voxtral/__pycache__/processing_voxtral.cpython-313.pyc,, +transformers/models/voxtral/configuration_voxtral.py,sha256=y_8_udDNsK5Zsy6JAkbIJ7QhMYCDLwHNwp1QhQcYmDs,8474 +transformers/models/voxtral/modeling_voxtral.py,sha256=R51I3peoSp9ToDkDH9VoFq2oE2DgVihamW86KW1J3fk,23538 +transformers/models/voxtral/modular_voxtral.py,sha256=ZCz05nTuH_iiBDGYA3NSr1axRHvZZY13PTcEVCzstVs,12052 +transformers/models/voxtral/processing_voxtral.py,sha256=MM-wkQejwvANTtNhDBxMYjPNjL7nlMh_-9yq-J09MCA,19847 +transformers/models/wav2vec2/__init__.py,sha256=5nXyY4dA0h9iNUQZrGAUXtjOnU6KbVq2B1gRzEGEUNI,1206 +transformers/models/wav2vec2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/configuration_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/feature_extraction_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_flax_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_tf_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/processing_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/__pycache__/tokenization_wav2vec2.cpython-313.pyc,, +transformers/models/wav2vec2/configuration_wav2vec2.py,sha256=A4XGuSVpZUfWvd-ZJPkcGhISsBSNtSgUCNU2gN0mZos,20100 +transformers/models/wav2vec2/feature_extraction_wav2vec2.py,sha256=OTlRDKVJjkM2J93nN-PRW8xWetFO6Q7TsoRHLvew2pA,11609 +transformers/models/wav2vec2/modeling_flax_wav2vec2.py,sha256=kfZkGa6MYfKJpYvg1jDkMhEaB2ak9LIEvAOYRa_7H4s,57356 +transformers/models/wav2vec2/modeling_tf_wav2vec2.py,sha256=Q2x2AIfXMKL-c_pJpAYJNf_ZpOAXUYzcazcAbpqn2Xg,78572 +transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=HBnRT5nGI8caqKBYI1l4oGqK9N77Xh9UMh96a3mE5XI,100972 +transformers/models/wav2vec2/processing_wav2vec2.py,sha256=KdHgpfu5Fv8ja2KRmoqr1urQ3mVe7mLqLf8kBswsgIA,8208 +transformers/models/wav2vec2/tokenization_wav2vec2.py,sha256=xaEOdRxousCdBFo4_snzcolYYFEype-apHZsIhhWxLc,38782 +transformers/models/wav2vec2_bert/__init__.py,sha256=DL010VL3ZV3lAugPH-BOTNSgIedotOEaoy8iHo0sC1Q,1051 +transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-313.pyc,, +transformers/models/wav2vec2_bert/__pycache__/configuration_wav2vec2_bert.cpython-313.pyc,, +transformers/models/wav2vec2_bert/__pycache__/modeling_wav2vec2_bert.cpython-313.pyc,, +transformers/models/wav2vec2_bert/__pycache__/modular_wav2vec2_bert.cpython-313.pyc,, +transformers/models/wav2vec2_bert/__pycache__/processing_wav2vec2_bert.cpython-313.pyc,, +transformers/models/wav2vec2_bert/configuration_wav2vec2_bert.py,sha256=XpsUE-kvk4hTixrK5sbClT8A5j46A3XwzBcwWDs5E7g,18142 +transformers/models/wav2vec2_bert/modeling_wav2vec2_bert.py,sha256=hrYFh1l6hB_GdKF7D_ah1yL2Lw7xu-VmXGu-LF0XK0o,66073 +transformers/models/wav2vec2_bert/modular_wav2vec2_bert.py,sha256=nVQeAVL8MLzlX2axaBt4UThy2zHHGVprWAYW9LpjeK8,44929 +transformers/models/wav2vec2_bert/processing_wav2vec2_bert.py,sha256=3lt98gf2ZxejI17pvYNkish_634Qukb99-zQ7cOWqBU,7544 +transformers/models/wav2vec2_conformer/__init__.py,sha256=JBpapW8VF3yck4Bk29xKyUiQZqB_CXLSYtYxXGXAu2Q,1017 +transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-313.pyc,, 
+transformers/models/wav2vec2_conformer/__pycache__/configuration_wav2vec2_conformer.cpython-313.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/modeling_wav2vec2_conformer.cpython-313.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/modular_wav2vec2_conformer.cpython-313.pyc,, +transformers/models/wav2vec2_conformer/configuration_wav2vec2_conformer.py,sha256=gBeb_cZC5XCDOmg1llPUQ0ELDS-1u0_eGZRrA98tLxM,20938 +transformers/models/wav2vec2_conformer/modeling_wav2vec2_conformer.py,sha256=eR0Zh5AUECUBvjdg8KPX7PnXM1Mz9mKvjmnaJ_ASWho,85397 +transformers/models/wav2vec2_conformer/modular_wav2vec2_conformer.py,sha256=k94FX7pPHNr8RXy6_aeUnP9aRc1ksCJ4KdC8jj-zybI,30730 +transformers/models/wav2vec2_phoneme/__init__.py,sha256=LV4FKcFYNt0GuJvfsUOwTYVFRVfuzUuclKRybFyN9lk,967 +transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-313.pyc,, +transformers/models/wav2vec2_phoneme/__pycache__/tokenization_wav2vec2_phoneme.cpython-313.pyc,, +transformers/models/wav2vec2_phoneme/tokenization_wav2vec2_phoneme.py,sha256=DdFdFOAOhnhy5Iq6U-eno8m-bgLbGC-2xiHauNhszd4,23217 +transformers/models/wav2vec2_with_lm/__init__.py,sha256=yZKHsma85j7AMLB8g8uNXL5D_E5Gc3Vqe-D-V2W15oY,965 +transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/wav2vec2_with_lm/__pycache__/processing_wav2vec2_with_lm.cpython-313.pyc,, +transformers/models/wav2vec2_with_lm/processing_wav2vec2_with_lm.py,sha256=cxCksE0dl-HONpCWGgzV_gfr-SHiDMhuZaoPB-LTl5Q,30031 +transformers/models/wavlm/__init__.py,sha256=wYnYuOpw2e95lauqDbD7u3OC-Pez8yoRsrgExSh_WJQ,991 +transformers/models/wavlm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/wavlm/__pycache__/configuration_wavlm.cpython-313.pyc,, +transformers/models/wavlm/__pycache__/modeling_wavlm.cpython-313.pyc,, +transformers/models/wavlm/__pycache__/modular_wavlm.cpython-313.pyc,, +transformers/models/wavlm/configuration_wavlm.py,sha256=HrK0dtsxcVB-k3yO2px6RS-TW1lonvx_x8uRkz7iJqQ,18588 +transformers/models/wavlm/modeling_wavlm.py,sha256=LYduqQ4d8uBJFJoki0fJ-KtJbb9ale5dqSIu1Dm2LN8,72561 +transformers/models/wavlm/modular_wavlm.py,sha256=hUsN1PF4AhTRvrwa9gI3KcLoJKdJcNU4j-JPE4e6VnA,23183 +transformers/models/whisper/__init__.py,sha256=qT70wGFDyOsAGuyaHe9if7kn8fxK2shCe6rovr3onw4,1244 +transformers/models/whisper/__pycache__/__init__.cpython-313.pyc,, +transformers/models/whisper/__pycache__/configuration_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/english_normalizer.cpython-313.pyc,, +transformers/models/whisper/__pycache__/feature_extraction_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/generation_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/modeling_flax_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/modeling_tf_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/modeling_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/processing_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/tokenization_whisper.cpython-313.pyc,, +transformers/models/whisper/__pycache__/tokenization_whisper_fast.cpython-313.pyc,, +transformers/models/whisper/configuration_whisper.py,sha256=CZgcgXbDdSSxvAwvJ0BmsVuhJLFOVgeGKnKn83yz978,17102 +transformers/models/whisper/english_normalizer.py,sha256=GmqBtyvGnsz2HXoksWAVu2wxJJJUclq-CSdH40jP51g,22857 +transformers/models/whisper/feature_extraction_whisper.py,sha256=wgnmafrg6l3pLhhG2HkOPW1mzij0zUBxeJwu7IjISmY,16732 
+transformers/models/whisper/generation_whisper.py,sha256=9svDWZvMi5tRlLg8F8XR3eVo049vZBnKomGM857HOUk,110511 +transformers/models/whisper/modeling_flax_whisper.py,sha256=Sdt-z5ZQXvD6I501rsCdPHmX5UuVtz1nWCmpt8ub5YY,74027 +transformers/models/whisper/modeling_tf_whisper.py,sha256=AkTDnM5YLd2hN3yjUnB9ILQ5IOW44DCpCvXnTRvT42I,84663 +transformers/models/whisper/modeling_whisper.py,sha256=hRx48api-E7Q0KTf3tR4oFyY9D0mcytW_qLUrMeXKLg,73607 +transformers/models/whisper/processing_whisper.py,sha256=HqD-DDmkNIbHgDLFgky6hYu8WeuOv0kV6nFkDNBE4yg,3341 +transformers/models/whisper/tokenization_whisper.py,sha256=JxHC6ryKd8kjOLXOIjKf37jdEf48Y1McdrWcmotRLHc,58409 +transformers/models/whisper/tokenization_whisper_fast.py,sha256=Q6HdLOFUejR-aEIJZo0SkCkIQCtFL7eSSyvSEJWbaJ0,30284 +transformers/models/x_clip/__init__.py,sha256=ufjh6w7SNuNAUjAHp_MK3yRcrHm22-SfhZ0ZfbiXhGw,1030 +transformers/models/x_clip/__pycache__/__init__.cpython-313.pyc,, +transformers/models/x_clip/__pycache__/configuration_x_clip.cpython-313.pyc,, +transformers/models/x_clip/__pycache__/modeling_x_clip.cpython-313.pyc,, +transformers/models/x_clip/__pycache__/processing_x_clip.cpython-313.pyc,, +transformers/models/x_clip/configuration_x_clip.py,sha256=_KPmQFusZP23e1Kh31SxJiR9qaaGm_dt1SsDS7DKUGs,18232 +transformers/models/x_clip/modeling_x_clip.py,sha256=xvrQ5FummEpKtwQ60sE6tDHk9FkL1W_8z15fzlcE2aM,63671 +transformers/models/x_clip/processing_x_clip.py,sha256=LpeVPYVnsiO_ZSPyOxbTaRT4OToiWjVXU0ygaoqVtbs,2735 +transformers/models/xcodec/__init__.py,sha256=X16pTVB3loZ9OMnqHAaxInF1X5EOhxlmHjyxEvoqSWE,993 +transformers/models/xcodec/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xcodec/__pycache__/configuration_xcodec.cpython-313.pyc,, +transformers/models/xcodec/__pycache__/modeling_xcodec.cpython-313.pyc,, +transformers/models/xcodec/configuration_xcodec.py,sha256=8V_u-p5JvxnpvH1ajT0GF5kb5Yh_FPrP9tydEusfHqQ,7932 +transformers/models/xcodec/modeling_xcodec.py,sha256=cVD7a8daBZLnu1ZO--mbGkVwc0zEyollfxeAkJTr9js,24209 +transformers/models/xglm/__init__.py,sha256=ZU7tQBmBXzr8wh9MJNDZ5uIrsCRQP8tuNrpGDd2W3OI,1142 +transformers/models/xglm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xglm/__pycache__/configuration_xglm.cpython-313.pyc,, +transformers/models/xglm/__pycache__/modeling_flax_xglm.cpython-313.pyc,, +transformers/models/xglm/__pycache__/modeling_tf_xglm.cpython-313.pyc,, +transformers/models/xglm/__pycache__/modeling_xglm.cpython-313.pyc,, +transformers/models/xglm/__pycache__/tokenization_xglm.cpython-313.pyc,, +transformers/models/xglm/__pycache__/tokenization_xglm_fast.cpython-313.pyc,, +transformers/models/xglm/configuration_xglm.py,sha256=m0sfPYf0qKl0pT9sOd3ssoQv13yt5IWRnUn11aGDa1Q,5881 +transformers/models/xglm/modeling_flax_xglm.py,sha256=BLW965ik0iRDR9fGbbw82SyuDxNs__9qKdQxZLPq7XI,33217 +transformers/models/xglm/modeling_tf_xglm.py,sha256=CHMd92nY-UsHA-lPDKOKdrPXHKUFWh3_7CDEGvmzy3E,45026 +transformers/models/xglm/modeling_xglm.py,sha256=wB6TSPD3hSbzyjnetdpqV3_5b92eH8QvWLRhWJYMeus,32780 +transformers/models/xglm/tokenization_xglm.py,sha256=lzdJEYP3S8w-HLa5nX7BAllqXQWnmr49kQsSYl3Cxe4,12576 +transformers/models/xglm/tokenization_xglm_fast.py,sha256=4G278mqxvBG0onsXTichXPRki94OiZJZ3Ioy4t6TfKQ,7470 +transformers/models/xlm/__init__.py,sha256=QevE83gMJ5h41H7EKxRAUN-kmE0zgOsyGj6QzWcpjmk,1058 +transformers/models/xlm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xlm/__pycache__/configuration_xlm.cpython-313.pyc,, +transformers/models/xlm/__pycache__/modeling_tf_xlm.cpython-313.pyc,, 
+transformers/models/xlm/__pycache__/modeling_xlm.cpython-313.pyc,, +transformers/models/xlm/__pycache__/tokenization_xlm.cpython-313.pyc,, +transformers/models/xlm/configuration_xlm.py,sha256=k216zyLI3r20HXsFfesj0QQiF-4oCxjUM3ONCxJZtzY,11062 +transformers/models/xlm/modeling_tf_xlm.py,sha256=kEPNqT6vET9FaGDUoo8-wfmCkmFIMnfU3XVPUyAVN8E,56527 +transformers/models/xlm/modeling_xlm.py,sha256=QXsgSyEFfftnS67zzrGriYKbnn1Ld32m75NMZ6tBy74,75495 +transformers/models/xlm/tokenization_xlm.py,sha256=UH0gOdFyLyiXC6_suxt9R9rWYwZjgFOhwnq6w3Rnq88,23408 +transformers/models/xlm_roberta/__init__.py,sha256=dhjej7PBi8UrfXRkTxh9CWXnw8wuLZHPT9FYFfCkIHg,1184 +transformers/models/xlm_roberta/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xlm_roberta/__pycache__/configuration_xlm_roberta.cpython-313.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_flax_xlm_roberta.cpython-313.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_tf_xlm_roberta.cpython-313.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_xlm_roberta.cpython-313.pyc,, +transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta.cpython-313.pyc,, +transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta_fast.cpython-313.pyc,, +transformers/models/xlm_roberta/configuration_xlm_roberta.py,sha256=A_vz0mpN0KhV6dbuOO8FR5qFCXXYeSASknBS_kVLXPM,7596 +transformers/models/xlm_roberta/modeling_flax_xlm_roberta.py,sha256=ndYuXyoj_I5xVW2TWB5SrXt_D3hlqShvbOY-tW0eOkM,58777 +transformers/models/xlm_roberta/modeling_tf_xlm_roberta.py,sha256=pFUXeNncEmL8gNGKg3Mwkh6B-db2yKXgnf5ZvrlN-Go,81896 +transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=lZTqyS04ctJbjuUp1k4cHdYTRVF0f1IFFHknIxJXjiU,71904 +transformers/models/xlm_roberta/tokenization_xlm_roberta.py,sha256=GYqlEjrwdzxZ1xdDuCpJRAKEZAIPuBdGWetCD7eXpzw,12804 +transformers/models/xlm_roberta/tokenization_xlm_roberta_fast.py,sha256=ojIIHjd2wELfeiFylqMYN0iYMeEaAtimZFOHVAFYkkM,7808 +transformers/models/xlm_roberta_xl/__init__.py,sha256=V0fXTKk2hQmf5dKogCJ0HSiRBxVX-rs7c414ZoZIh28,1009 +transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/configuration_xlm_roberta_xl.cpython-313.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/modeling_xlm_roberta_xl.cpython-313.pyc,, +transformers/models/xlm_roberta_xl/configuration_xlm_roberta_xl.py,sha256=f8cw938xnzVrNMZA9C6A0wIQm_mmtUr6EMQAgamN9Sw,7348 +transformers/models/xlm_roberta_xl/modeling_xlm_roberta_xl.py,sha256=H-bkQ_D7sfsq5vGRAW25vN54rqHTgfhWdRt1uRirMe0,67346 +transformers/models/xlnet/__init__.py,sha256=t-UvrFyorGF7VMuATzjrB_cUqKsM-8O9KqxiWjtJqhs,1109 +transformers/models/xlnet/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xlnet/__pycache__/configuration_xlnet.cpython-313.pyc,, +transformers/models/xlnet/__pycache__/modeling_tf_xlnet.cpython-313.pyc,, +transformers/models/xlnet/__pycache__/modeling_xlnet.cpython-313.pyc,, +transformers/models/xlnet/__pycache__/tokenization_xlnet.cpython-313.pyc,, +transformers/models/xlnet/__pycache__/tokenization_xlnet_fast.cpython-313.pyc,, +transformers/models/xlnet/configuration_xlnet.py,sha256=U_WpCoqALv86cbvTXgTVnJwOfl3nzcGTgZJd_9SDhvY,10953 +transformers/models/xlnet/modeling_tf_xlnet.py,sha256=FjkGozmhkH6qIPhQO15VCkDJMWOVj2QD7aCR3Nb12wQ,77744 +transformers/models/xlnet/modeling_xlnet.py,sha256=jOOCwlzqD4b-JU6G0RSUqPF6QFrpfOanNjB3EItFW2w,107152 +transformers/models/xlnet/tokenization_xlnet.py,sha256=8GVn73lPjtz6PljvlLuwvuNCElWfbLHfHtdYaI8XrS8,15800 
+transformers/models/xlnet/tokenization_xlnet_fast.py,sha256=ZfWE9TWuq8NC7Q3Z2y-plAofp0o9PtQEu08U4M7Qx6s,9247 +transformers/models/xlstm/__init__.py,sha256=-Vfj7bUcDAD3TguoDgKW0zpzZ8KtOmnUNwSkvL6Df8k,1047 +transformers/models/xlstm/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xlstm/__pycache__/configuration_xlstm.cpython-313.pyc,, +transformers/models/xlstm/__pycache__/modeling_xlstm.cpython-313.pyc,, +transformers/models/xlstm/configuration_xlstm.py,sha256=6jEctcitQnoEpOb-ruRiq0OMmq1mSJkdbc3yMTeHMdY,12847 +transformers/models/xlstm/modeling_xlstm.py,sha256=yDYbYgohmUYC-TDm1ipDiasj5r7rqVbLck7nF9NZauM,66379 +transformers/models/xmod/__init__.py,sha256=WLxIbzC8oCEkMrerWHTy7GLopz0mqocSaacdcyb_BhQ,989 +transformers/models/xmod/__pycache__/__init__.cpython-313.pyc,, +transformers/models/xmod/__pycache__/configuration_xmod.cpython-313.pyc,, +transformers/models/xmod/__pycache__/modeling_xmod.cpython-313.pyc,, +transformers/models/xmod/configuration_xmod.py,sha256=W5bQLbTh3EhMd-Lvseyl28uQhkVVCdPSdhcRXJO7hcg,9180 +transformers/models/xmod/modeling_xmod.py,sha256=gNf0LKpksSuW2QSDQtzD0QBcuScF-qZkdZJ-5IO2LyE,68646 +transformers/models/yolos/__init__.py,sha256=UlbQDtMQJaGRcin-iz6NOEFWT8otanBndRuw4VrWUiQ,1124 +transformers/models/yolos/__pycache__/__init__.cpython-313.pyc,, +transformers/models/yolos/__pycache__/configuration_yolos.cpython-313.pyc,, +transformers/models/yolos/__pycache__/feature_extraction_yolos.cpython-313.pyc,, +transformers/models/yolos/__pycache__/image_processing_yolos.cpython-313.pyc,, +transformers/models/yolos/__pycache__/image_processing_yolos_fast.cpython-313.pyc,, +transformers/models/yolos/__pycache__/modeling_yolos.cpython-313.pyc,, +transformers/models/yolos/__pycache__/modular_yolos.cpython-313.pyc,, +transformers/models/yolos/configuration_yolos.py,sha256=3MosWcNOUgTJ1pTBkCQT852fsnIDHomISTyCShOKo2k,7627 +transformers/models/yolos/feature_extraction_yolos.py,sha256=5wVaZnDzK3ROFChjwHYMHGv1aPmtq1IOqmt100yImtE,1594 +transformers/models/yolos/image_processing_yolos.py,sha256=I_gQH5v6KCmM-o0DFVhcKThLxiSdcsmzC5XXCmmNhWA,67993 +transformers/models/yolos/image_processing_yolos_fast.py,sha256=Y-qfpXBEbSl2Pj75A4MUhG2Pqom6JKuUoShK2KmCb40,36609 +transformers/models/yolos/modeling_yolos.py,sha256=I23SChE6JPVUZZXuifPFd1HcfdiwSSrOfOTsNvnLYiM,30708 +transformers/models/yolos/modular_yolos.py,sha256=ec81AdiV3BwrEkb8tQZU_c9O9nntLj0YZ9TntYEEsTY,8239 +transformers/models/yoso/__init__.py,sha256=sCXsXYZuOQLFkZMexRb8qY7EJCftR54G_eO7qIUvdss,989 +transformers/models/yoso/__pycache__/__init__.cpython-313.pyc,, +transformers/models/yoso/__pycache__/configuration_yoso.cpython-313.pyc,, +transformers/models/yoso/__pycache__/modeling_yoso.cpython-313.pyc,, +transformers/models/yoso/configuration_yoso.py,sha256=6PQqt0OjHQBTNnnhDE761sdwlq9_tqG48UJ-pBV3rBM,6715 +transformers/models/yoso/modeling_yoso.py,sha256=GlEYsQNJm2Uj3j3dOPCR7GsDTNIu49nmqi6AJoCHdeI,49723 +transformers/models/zamba/__init__.py,sha256=iqZnf8BQ49TLcB4mYwIfuJeF4aGvYhOBRiGI6_74ZFk,991 +transformers/models/zamba/__pycache__/__init__.cpython-313.pyc,, +transformers/models/zamba/__pycache__/configuration_zamba.cpython-313.pyc,, +transformers/models/zamba/__pycache__/modeling_zamba.cpython-313.pyc,, +transformers/models/zamba/configuration_zamba.py,sha256=0sHrNCBHaMWoTLegdBSl2WFQBQtyMj4qb_XNO5cUM64,11292 +transformers/models/zamba/modeling_zamba.py,sha256=DeYNv2grFHL9aAMCBWiUaueynTAX1Wnt8NFX-uTKFHk,63188 +transformers/models/zamba2/__init__.py,sha256=3FgH8KelorllnKF6ncpKGREwZXt6YwsQ7NPS8W6jcmQ,993 
+transformers/models/zamba2/__pycache__/__init__.cpython-313.pyc,, +transformers/models/zamba2/__pycache__/configuration_zamba2.cpython-313.pyc,, +transformers/models/zamba2/__pycache__/modeling_zamba2.cpython-313.pyc,, +transformers/models/zamba2/__pycache__/modular_zamba2.cpython-313.pyc,, +transformers/models/zamba2/configuration_zamba2.py,sha256=DNZAQNnBwPacTJy1yp7Dd8y9aW7HL3zFwa0xam1Fop8,12734 +transformers/models/zamba2/modeling_zamba2.py,sha256=rIilQ50ayrzmATdDqexIsBSDIODHFfGyQLtPY8r-Wr8,85124 +transformers/models/zamba2/modular_zamba2.py,sha256=4DXLXOTbqQR92R_PEeTZ8lRIAlzp26UXE3zhfPOjgfs,56003 +transformers/models/zoedepth/__init__.py,sha256=BUGUeWtpJJRRdQGT1dIOi-B5v89Ae8eTTxbEmVqiu0k,1092 +transformers/models/zoedepth/__pycache__/__init__.cpython-313.pyc,, +transformers/models/zoedepth/__pycache__/configuration_zoedepth.cpython-313.pyc,, +transformers/models/zoedepth/__pycache__/image_processing_zoedepth.cpython-313.pyc,, +transformers/models/zoedepth/__pycache__/image_processing_zoedepth_fast.cpython-313.pyc,, +transformers/models/zoedepth/__pycache__/modeling_zoedepth.cpython-313.pyc,, +transformers/models/zoedepth/configuration_zoedepth.py,sha256=gRQ2uRqmhfagYNHU3FkWJ4PPirYIe9Ve7GJ7ENW17mk,12972 +transformers/models/zoedepth/image_processing_zoedepth.py,sha256=GJNTg1Zvx7zTxLGvlcN1iGlhhfa5-WsZlJHoGVRZfG8,28203 +transformers/models/zoedepth/image_processing_zoedepth_fast.py,sha256=UxGdJjbLQ_yH7evC7Z4ldZt2I8dXPEk30YT4C32vXh4,13322 +transformers/models/zoedepth/modeling_zoedepth.py,sha256=_SGP87VHSdW3uPkG2fqna52q_h_c5FnKO7kz1UsVxSg,54555 +transformers/onnx/__init__.py,sha256=wALLY4TPOK2iPrFcfZf_WiEmTRAU6dAWHElxGdexr58,1548 +transformers/onnx/__main__.py,sha256=JZ9ZmeRsnDitwTMWb-dFT8W9AEmMoMKLQ3SvbyCkY0w,9497 +transformers/onnx/__pycache__/__init__.cpython-313.pyc,, +transformers/onnx/__pycache__/__main__.cpython-313.pyc,, +transformers/onnx/__pycache__/config.cpython-313.pyc,, +transformers/onnx/__pycache__/convert.cpython-313.pyc,, +transformers/onnx/__pycache__/features.cpython-313.pyc,, +transformers/onnx/__pycache__/utils.cpython-313.pyc,, +transformers/onnx/config.py,sha256=BAsRXnhp8q28Axi0NNpJELcXxId4eqm5Yyd_cWh9cgc,32627 +transformers/onnx/convert.py,sha256=1Skizwf9hyB2CQtNNwjwtuJT9EshegWQNcvPdJp4SNg,19418 +transformers/onnx/features.py,sha256=zRhGiYgzMMfvdh7UvCO9j_y0L9cbVbTSL88cItk_PBg,28276 +transformers/onnx/utils.py,sha256=39Uw_GkFBsTb6ZvMIHRTnI289aQDhc6hwfEapaBGE-o,3625 +transformers/optimization.py,sha256=QbCP-ynCaRYmsiXtOJppTMjmFICBghSjXyFbXDkWe_s,39971 +transformers/optimization_tf.py,sha256=JYL8tVbLyB3puGJ0b2i1gGaidCHSxm8_jYmm7z-ZJ-4,16718 +transformers/pipelines/__init__.py,sha256=uewHAiQR2oMlXCvaF-qXvX2sArzOvxHdcQ_3OgEyym4,85062 +transformers/pipelines/__pycache__/__init__.cpython-313.pyc,, +transformers/pipelines/__pycache__/audio_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/audio_utils.cpython-313.pyc,, +transformers/pipelines/__pycache__/automatic_speech_recognition.cpython-313.pyc,, +transformers/pipelines/__pycache__/base.cpython-313.pyc,, +transformers/pipelines/__pycache__/depth_estimation.cpython-313.pyc,, +transformers/pipelines/__pycache__/document_question_answering.cpython-313.pyc,, +transformers/pipelines/__pycache__/feature_extraction.cpython-313.pyc,, +transformers/pipelines/__pycache__/fill_mask.cpython-313.pyc,, +transformers/pipelines/__pycache__/image_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/image_feature_extraction.cpython-313.pyc,, 
+transformers/pipelines/__pycache__/image_segmentation.cpython-313.pyc,, +transformers/pipelines/__pycache__/image_text_to_text.cpython-313.pyc,, +transformers/pipelines/__pycache__/image_to_image.cpython-313.pyc,, +transformers/pipelines/__pycache__/image_to_text.cpython-313.pyc,, +transformers/pipelines/__pycache__/keypoint_matching.cpython-313.pyc,, +transformers/pipelines/__pycache__/mask_generation.cpython-313.pyc,, +transformers/pipelines/__pycache__/object_detection.cpython-313.pyc,, +transformers/pipelines/__pycache__/pt_utils.cpython-313.pyc,, +transformers/pipelines/__pycache__/question_answering.cpython-313.pyc,, +transformers/pipelines/__pycache__/table_question_answering.cpython-313.pyc,, +transformers/pipelines/__pycache__/text2text_generation.cpython-313.pyc,, +transformers/pipelines/__pycache__/text_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/text_generation.cpython-313.pyc,, +transformers/pipelines/__pycache__/text_to_audio.cpython-313.pyc,, +transformers/pipelines/__pycache__/token_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/video_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/visual_question_answering.cpython-313.pyc,, +transformers/pipelines/__pycache__/zero_shot_audio_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/zero_shot_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/zero_shot_image_classification.cpython-313.pyc,, +transformers/pipelines/__pycache__/zero_shot_object_detection.cpython-313.pyc,, +transformers/pipelines/audio_classification.py,sha256=9U6AmVAPfPInduBbdued0nXZpMdj2nWTgV3S0m6golo,11188 +transformers/pipelines/audio_utils.py,sha256=HLzdBFNQOfDyJJbvfMYXZTXlWcjTPLyWaPO6fUulQkk,12271 +transformers/pipelines/automatic_speech_recognition.py,sha256=3BVb2RoO5qinhMT1nCTW1PhANIrC3U0MPNy7Gy9g0ZY,33915 +transformers/pipelines/base.py,sha256=pTwHOzFrYIkMDXEblnKj5sPufhfvXaFixcoibJdPLzA,69144 +transformers/pipelines/depth_estimation.py,sha256=ARwIxnh4S-gn9OgOfJpfNgKpdCD0hQIZg1B925OpPt8,6170 +transformers/pipelines/document_question_answering.py,sha256=eJ1AMIGk-f2xwR9DJOwP_XPKgMjr2kpH_nCz9jlMWyc,25786 +transformers/pipelines/feature_extraction.py,sha256=x6ZmfVcoT9TNWaPyM5S-zep_cCgTsFBFxe7B8QdrVTs,3564 +transformers/pipelines/fill_mask.py,sha256=KR5SmztnwW2C_abrrsz4R58i-Ju61yQGGTVjNCn76Z8,12061 +transformers/pipelines/image_classification.py,sha256=LrAvZRwEC6Cqk9jlST1fD51VO67w6Hjm7q_gMdoxa7k,10232 +transformers/pipelines/image_feature_extraction.py,sha256=24ib-fmsdbN2ZX7PxJvAI3OX1z2x80X8A2IcSF6ynJI,4963 +transformers/pipelines/image_segmentation.py,sha256=BcswPSyXzbC_3IW6EYws10dwJxpBPS9JcrlbevBRD10,9976 +transformers/pipelines/image_text_to_text.py,sha256=Bfyy-UvXa2PgucQSQAOMDCzy1uoZqK_lG4RFy5lFMOs,24086 +transformers/pipelines/image_to_image.py,sha256=-4I7xiTDXcxptt8i4reI_n6rm1t-7QWwndm4MpJDD-0,5387 +transformers/pipelines/image_to_text.py,sha256=rn8nvu1PbKwzXYoA8QhPehP4kk8c5Z8urZnS0JzNTRU,10317 +transformers/pipelines/keypoint_matching.py,sha256=bjA8sqNFwR8OQX5oCYc8S5v6wWOhy9cD8u04kV8LNcg,7212 +transformers/pipelines/mask_generation.py,sha256=atgPYQq8Pzhd1BF-O7lbGHZ2X-9oAIKYmJ9XkevBd90,15513 +transformers/pipelines/object_detection.py,sha256=fLCAtvjdL_atexroUzkC9pfurffxQtblOSW5WMgkaoo,8642 +transformers/pipelines/pt_utils.py,sha256=DtNVgO1ud1n0bYMwJclMvnIJqtoNs9aedcXuCvJL_SM,12816 +transformers/pipelines/question_answering.py,sha256=iUXKuYPemRszay8ID279joBj1gi5fbsfu0t_9qh-laU,31081 
+transformers/pipelines/table_question_answering.py,sha256=V6QtaXUjxtYc7KEXMba4tFMxPYqZeJOZpXlt3fS7ifE,20923 +transformers/pipelines/text2text_generation.py,sha256=S9NhbLvTm3cjVD-Ai6AOFrimeSty447OEKEcoeMqCys,18896 +transformers/pipelines/text_classification.py,sha256=LrmS2Etpkb_QCVjXpCxTYfA1Ds72uTUxSSUZ3rQ4yHs,11272 +transformers/pipelines/text_generation.py,sha256=hYSlVy2WzHHni_h0T-s-d2QM4iij_hDvY9_BKZU_7ss,27394 +transformers/pipelines/text_to_audio.py,sha256=50AlNLsRcnMcKYEAD2u9feaA7tfzaRGSuJw6dWCHFmw,11171 +transformers/pipelines/token_classification.py,sha256=ELCsAB3pnMBiu5jB-euIjaOv8LpgHFKaC2oX9a9ZDXM,30829 +transformers/pipelines/video_classification.py,sha256=lLXCowAUy7Hsy0AcyXOLJ0g_rZ5Yek1QYOa3McS0lD0,8236 +transformers/pipelines/visual_question_answering.py,sha256=v47WPs2TqnrmMuht-_48oOrLEuMoMtvmDUmmPhDoHVM,9819 +transformers/pipelines/zero_shot_audio_classification.py,sha256=4xyLE2eScVzbAWks8g2HOuXNyNMwqUSJTyANFHWYEFQ,7028 +transformers/pipelines/zero_shot_classification.py,sha256=svbtL0nOT3COZQa8DUVSW1QCK0Hx5ZkR5p91EC73rwA,12539 +transformers/pipelines/zero_shot_image_classification.py,sha256=YCTIDSWDmmCkJUj50KpnHLbMZ51cBjpgx_s47Zcnrj0,8622 +transformers/pipelines/zero_shot_object_detection.py,sha256=tJNu8fhYCsGNHdh4jiHlWOeUnhcVC8wNs5nvdB7Mp00,10740 +transformers/processing_utils.py,sha256=bCneXD_GdM8humN3o5y6-yuj5LwtEgLUuUT4z4QObzM,87156 +transformers/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/pytorch_utils.py,sha256=4XMBrwsRqomCKDII-3tK9ROFPWQ3EbGTKD0aQp1rK4Y,14852 +transformers/quantizers/__init__.py,sha256=S_xTSTbkDOvjLgR3jgR4EAkP_sc3NE8e38T-lllAaNo,800 +transformers/quantizers/__pycache__/__init__.cpython-313.pyc,, +transformers/quantizers/__pycache__/auto.cpython-313.pyc,, +transformers/quantizers/__pycache__/base.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_aqlm.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_auto_round.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_awq.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_bitnet.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_compressed_tensors.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_eetq.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_fbgemm_fp8.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_finegrained_fp8.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_fp_quant.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_gptq.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_higgs.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_hqq.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_mxfp4.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_quanto.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_quark.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_spqr.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_torchao.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizer_vptq.cpython-313.pyc,, +transformers/quantizers/__pycache__/quantizers_utils.cpython-313.pyc,, +transformers/quantizers/auto.py,sha256=Nr3GYO5ruu6UnJ06V-qjktNjhbAT5ccfn6BJARIuyrg,13804 +transformers/quantizers/base.py,sha256=rHK6XP8YkGSKHtBkhCVAgT5wrNu7QX4SsM2ip3ga8Rc,16334 
+transformers/quantizers/quantizer_aqlm.py,sha256=t6XKZKBpYzNukrPXkU8AZtCg2j4uX4CYrCii4k2JVRI,3599 +transformers/quantizers/quantizer_auto_round.py,sha256=F2ctpnJwlUI1pnV4TGul4km2_PinHjCNIAadlZvkuLo,3068 +transformers/quantizers/quantizer_awq.py,sha256=W69vAaXoIUWWHVd_QeZ7dUKwfFa_2_ddA02-F-CBIcs,7414 +transformers/quantizers/quantizer_bitnet.py,sha256=9cQ6Nk8qXvgtgbMzmPPwpBu9MDfmqZ1xB6pnBx4aaQU,4669 +transformers/quantizers/quantizer_bnb_4bit.py,sha256=KFJNrsbGKI42jLBfTz-bHjU9g_xfqoAqT6KV2YjLkqo,16098 +transformers/quantizers/quantizer_bnb_8bit.py,sha256=94EXjl9woh8Gllw61ToS_rfHXzAiUXZIhGs0pHwq5NA,12570 +transformers/quantizers/quantizer_compressed_tensors.py,sha256=jU7HrDPFidB9E1aAvUWL_n98Woph7cBC1uit8Ka7Mt4,5082 +transformers/quantizers/quantizer_eetq.py,sha256=5HsstHPFjFosjNc4NcJuVhchka_NIfKc8M87lmM9n5E,7034 +transformers/quantizers/quantizer_fbgemm_fp8.py,sha256=qe_pYCQ5BdrOVIyc2Q2vkQb3ZHuRx4T-pkHHmZUFRqs,13607 +transformers/quantizers/quantizer_finegrained_fp8.py,sha256=WvwaXdqujw_CPRzajUBdSRhFnPBhjPu7ceUadWVV3iU,9481 +transformers/quantizers/quantizer_fp_quant.py,sha256=8Rf2LcDtumNsqNBRYH4bs_ZIqNANd4jbv9LMPBzLQ7o,7548 +transformers/quantizers/quantizer_gptq.py,sha256=81KDChWDLG-rA0V3TkpZEB1hUPVhq2asg_8XfcKoWwU,5585 +transformers/quantizers/quantizer_higgs.py,sha256=RGhg7cLlhgFmSTmPA9xsAUvSydSpX4T8oPOuq4qEzb8,8277 +transformers/quantizers/quantizer_hqq.py,sha256=fFvZIZCmycyY06PA6RpHw5EMK6ykJlYerx20MtoFxVI,11395 +transformers/quantizers/quantizer_mxfp4.py,sha256=l6Ha75phDPeNgUO4B98MEtxxEy4MAtfLowyLnfNo-oA,19225 +transformers/quantizers/quantizer_quanto.py,sha256=VKKNzoMXBACtCb4ZpjEKDX5A0_-OxjgviwrvXBU7uWE,6793 +transformers/quantizers/quantizer_quark.py,sha256=0dkQFwD8b8uS_vy8nRTaVhhVTLqG7eDWpGxLHYLJNDM,3528 +transformers/quantizers/quantizer_spqr.py,sha256=ctTswadhjIb-MHtmmhYgjx4pwFmbC59MptzXXI6___4,3212 +transformers/quantizers/quantizer_torchao.py,sha256=V1fqs9qmCwmH8pqwRtUpD9aRZfNC2wg5Q19d0Twm1qM,20572 +transformers/quantizers/quantizer_vptq.py,sha256=3jDRNytkYDJvnyq1vE656ElJpN9olkEyQB4ur0unQ-k,3722 +transformers/quantizers/quantizers_utils.py,sha256=gVf8Up7S6h8mrYHtwmcAJgBENhwQsh3x6cMmoPso6x8,878 +transformers/safetensors_conversion.py,sha256=LjnFRVfXRsOhIHdyiw6pevDJcMdsKwc3kvQ6csPs9wA,4074 +transformers/sagemaker/__init__.py,sha256=fKtKAHamz_CLL9jPGCa2E-1n8RmuS-58qGtzZuKc3qg,730 +transformers/sagemaker/__pycache__/__init__.cpython-313.pyc,, +transformers/sagemaker/__pycache__/trainer_sm.cpython-313.pyc,, +transformers/sagemaker/__pycache__/training_args_sm.cpython-313.pyc,, +transformers/sagemaker/trainer_sm.py,sha256=7GsKLtjdMfKp98OwHD7RcBsl745OOwHAaBswkfLkfsE,1044 +transformers/sagemaker/training_args_sm.py,sha256=u4ufd92WQzfhzHC7j7vgNOx3BcwNPzzFHgImeXHl48g,5410 +transformers/testing_utils.py,sha256=m0CM3WgV0A7W5GZ62lZACbiyi-4uvmjTuoj6QFtOyyc,151169 +transformers/tf_utils.py,sha256=uiS6uSPmB_ZUaxbV-vMkGy1roDTtY3ujpIgkwuskGmc,11390 +transformers/time_series_utils.py,sha256=fhc___L7NHqLzQ2lvrojW0yGkXJUTVqHGEAt5VDRqNA,7493 +transformers/tokenization_mistral_common.py,sha256=U-f69MiL63qob6z6MjGPh0TaN3J26vqioOzvZNElZYk,91444 +transformers/tokenization_utils.py,sha256=38xCQUA32GXSJWglnuwS3RDKpzdbLXoncHt6UCoI74A,47780 +transformers/tokenization_utils_base.py,sha256=yfJLlYZ46Lyif-bioflltRMlGAITpcSYUYFEOONgJaI,215810 +transformers/tokenization_utils_fast.py,sha256=VY5FTuaFDpDovF0XguNYeN_aHam_35l64RP6f_dxoPM,41383 +transformers/trainer.py,sha256=w-rI-ii9pjMP1N7hMef-fZvcD6NdnelE_OUsuXs3m6s,279527 +transformers/trainer_callback.py,sha256=YkfU5q-2K7G2RcmdaDLnajZOdSXiaCNKSsmJAot8hN8,33631 
+transformers/trainer_pt_utils.py,sha256=MfApM3Cv-9DaHDOmFCdX-BpNT7v5AZrriSlYUNLC54Q,61426 +transformers/trainer_seq2seq.py,sha256=XnhGbtAwI0F4E9ynU5L80I4DP1DJeP8rOU1C6YzXE9E,18001 +transformers/trainer_utils.py,sha256=IOMtBe8i82arnOkcbb7CKQsuRvjadJ1S7uuvakNJvY4,34254 +transformers/training_args.py,sha256=hVkKJiy8gaz27O4GTis-71pxBa-QdfEtkx0tALh6cXA,162912 +transformers/training_args_seq2seq.py,sha256=J9_vJQR4VxWAHWVbRmxjXHSRLd6KSe8inisIVezlbXI,3896 +transformers/training_args_tf.py,sha256=4WYwTKApwnjndGhEDL_NNOtvQ6VIa5w_ZZEBzAAN4Qg,14604 +transformers/utils/__init__.py,sha256=RueiNTpYNmGkkt_pQNEZ3gA1JnmeVoUdi-tFN6HmUmM,10400 +transformers/utils/__pycache__/__init__.cpython-313.pyc,, +transformers/utils/__pycache__/attention_visualizer.cpython-313.pyc,, +transformers/utils/__pycache__/auto_docstring.cpython-313.pyc,, +transformers/utils/__pycache__/backbone_utils.cpython-313.pyc,, +transformers/utils/__pycache__/bitsandbytes.cpython-313.pyc,, +transformers/utils/__pycache__/chat_template_utils.cpython-313.pyc,, +transformers/utils/__pycache__/constants.cpython-313.pyc,, +transformers/utils/__pycache__/deprecation.cpython-313.pyc,, +transformers/utils/__pycache__/doc.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_detectron2_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_flax_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_mistral_common_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_music_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_pt_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_speech_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_tf_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_timm_and_torchvision_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_tokenizers_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_torchaudio_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_torchvision_objects.cpython-313.pyc,, +transformers/utils/__pycache__/dummy_vision_objects.cpython-313.pyc,, +transformers/utils/__pycache__/fx.cpython-313.pyc,, +transformers/utils/__pycache__/generic.cpython-313.pyc,, +transformers/utils/__pycache__/hp_naming.cpython-313.pyc,, +transformers/utils/__pycache__/hub.cpython-313.pyc,, +transformers/utils/__pycache__/import_utils.cpython-313.pyc,, +transformers/utils/__pycache__/logging.cpython-313.pyc,, +transformers/utils/__pycache__/metrics.cpython-313.pyc,, +transformers/utils/__pycache__/model_parallel_utils.cpython-313.pyc,, +transformers/utils/__pycache__/notebook.cpython-313.pyc,, +transformers/utils/__pycache__/peft_utils.cpython-313.pyc,, +transformers/utils/__pycache__/quantization_config.cpython-313.pyc,, +transformers/utils/__pycache__/sentencepiece_model_pb2.cpython-313.pyc,, +transformers/utils/__pycache__/sentencepiece_model_pb2_new.cpython-313.pyc,, +transformers/utils/__pycache__/versions.cpython-313.pyc,, +transformers/utils/attention_visualizer.py,sha256=3cFo4QHuidz5HThcqh6e-FeShTmcZizqt7F6Vj6Wxaw,10002 +transformers/utils/auto_docstring.py,sha256=Wsc50zaeZGtzgXve7L9Mifn7wDvne0s8Cx3nawQl7FE,82036 
+transformers/utils/backbone_utils.py,sha256=VBTJq-x9JZBfV1ftRcUqK3PnsVjVdzJetRBwTBouggM,17722 +transformers/utils/bitsandbytes.py,sha256=LzOKwcHWAxxZZv-7Ts9Q0vlEYvHd18affVgVbiR3Tzs,1040 +transformers/utils/chat_template_utils.py,sha256=ph0p2YRJSZmg8aKAFDQi4oRpZHJ6TMZvcuEs_x2ZLuI,22775 +transformers/utils/constants.py,sha256=sZsUwOnA3CbtN1svs9YoaNLTTsAc9RVaITsgpf8K4iI,282 +transformers/utils/deprecation.py,sha256=rsbc7bbHPmvePSmkpf_nXQ7OIX6ITFSK6nJxHvu0bY4,8065 +transformers/utils/doc.py,sha256=K5MgXYi1j1Bd5OU-ho57ojgp1UftHN4Yu2mpj3x3lQA,52480 +transformers/utils/dummy_detectron2_objects.py,sha256=n7Pt_7sbVBNfohKGcOARB-ZcPcJRbjEAcoLd2vTXndU,340 +transformers/utils/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.py,sha256=n6pY4s7zCII3dzo7Ejd0RviHa_pMateuDEwbbHgsTUY,902 +transformers/utils/dummy_flax_objects.py,sha256=lqW9EJzfDmsx7Uj4cm4UHUUwcYI9SFm8-biApCP40HQ,2652 +transformers/utils/dummy_mistral_common_objects.py,sha256=a43f12WAikWuVMOnFPTA1A2rvI9gi2a4POyuBLLFVEs,311 +transformers/utils/dummy_music_objects.py,sha256=1lxIebYUOdHJWMQ_T5IQgPgcO_wp_8YM_HGc3skuGVg,458 +transformers/utils/dummy_pt_objects.py,sha256=NX6GyTm_tdvcxYUpBpa7LhgMcNfRG-SuGKHkg9FE5us,14933 +transformers/utils/dummy_sentencepiece_and_tokenizers_objects.py,sha256=BgPLr8Wz8A-17K86x04N21CKXtWNQLJEWx2c4aZRqaA,286 +transformers/utils/dummy_sentencepiece_objects.py,sha256=pBykNNg9IPDeshVOeaw4sxHvgmt3by9X4rIQtz0ONYg,6455 +transformers/utils/dummy_speech_objects.py,sha256=9eFm1cjdsYOPBoAz9JTgP35Bg8WF2C9AZ_y1hFpKZdQ,465 +transformers/utils/dummy_tensorflow_text_objects.py,sha256=43V0IA2kb9gtuL0S1OL1eRFFxzQwKg4pPjMVuXUB5qg,306 +transformers/utils/dummy_tf_objects.py,sha256=8ZPa6w8h-VzRDzwOO9xK26u9evz3T8bkxSLhgxI-lKU,4139 +transformers/utils/dummy_timm_and_torchvision_objects.py,sha256=EFuC5z6IsKOqqowoUGviJ3KgTjzvdTTN7gGQ3it-4t0,324 +transformers/utils/dummy_tokenizers_objects.py,sha256=PFIh5nBDmhWG2XDGuwIyBGldm6b_jdZdL3E8t5A8FsY,304 +transformers/utils/dummy_torchaudio_objects.py,sha256=EG0q0JkedoNb_4ntsf6EyTOE6Nr1whvHOzHPKy1t7x0,847 +transformers/utils/dummy_torchvision_objects.py,sha256=BaUQGsNL0Xfj-HP-pOVXSKYw5UFaNlWD_Iso9D8muGw,479 +transformers/utils/dummy_vision_objects.py,sha256=GDbX7-GrqykExLY91SMhSf508DinS5NSFfavbeDsCMU,630 +transformers/utils/fx.py,sha256=zCoZAdT1XNnypOj2fCQMX-ADlBYQlmJKrkJvesarxME,56930 +transformers/utils/generic.py,sha256=-_fjYoDpm6knoiU9aZD9D1mADPsd6cmAiLn-DSUyJ3s,41874 +transformers/utils/hp_naming.py,sha256=vqcOXcDOyqbISWo8-ClUJUOBVbZM1h08EcymTwcRthc,4979 +transformers/utils/hub.py,sha256=vI9J6TYLkExDgsgwtYtbcNffXtZv2Qspi_sO3CPi6EY,50071 +transformers/utils/import_utils.py,sha256=ZrKsQL4sU429lAk3DB9r92mTS4sjLtrDZeFimcXsEQg,108811 +transformers/utils/logging.py,sha256=izaHM1YuNFG1dsZZ8rbbRfBB897o2BgNFscrMlOVAII,12219 +transformers/utils/metrics.py,sha256=bL6xW97M-trUf0uSSZnYnZ-1_u_RxRiIg5BJY0bnh30,15402 +transformers/utils/model_parallel_utils.py,sha256=dmPsjrVGLxwYHsGXyvFQrcl-aZRQA5hydi4I7_sBAoo,2257 +transformers/utils/notebook.py,sha256=Gkg0GxMhbrTk21Fp6aXVeTuVk49yPDWuGbLc4OCHrPo,15796 +transformers/utils/peft_utils.py,sha256=7XZBVmD_gcZl2hjwTRYztGs-WEHfRc53rqNuUdAlzl0,5193 +transformers/utils/quantization_config.py,sha256=MK8CU9pBIqA8TXWMraDfrM3YndtyW39pj0W-E13bU1g,95861 +transformers/utils/sentencepiece_model_pb2.py,sha256=WcMZRm2-571XwxSfo-6FZih9fDy_Zl5mMwqrDrC1Dlg,50663 +transformers/utils/sentencepiece_model_pb2_new.py,sha256=ahaV--amhGIL3nXFCTHqezqxuGXm8SHr_C3Zvj7KbAY,6598 +transformers/utils/versions.py,sha256=C-Tqr4qGSHH64ygIBCSo8gA6azz7Dbzh8zdc_yjMkX8,4337 
+transformers/video_processing_utils.py,sha256=Bn3JWf6ADVpaWrOcVLU4F6S1lCTzFDZ2fXNhwgs7pAk,41231 +transformers/video_utils.py,sha256=O8F09M9oEV6tOZtJUh38aZsb9gVeWv6aQ3L9mO2Bnb4,33856 diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/REQUESTED b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/WHEEL b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e7fa31b6f3f78deb1022c1f7927f07d4d16da822 --- /dev/null +++ b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/entry_points.txt b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..47f047547c476346d81519e9d380af1a6be26155 --- /dev/null +++ b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +transformers = transformers.commands.transformers_cli:main +transformers-cli = transformers.commands.transformers_cli:main_cli diff --git a/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/top_level.txt b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..976a2b1f3998279c10c413279a095be86bf69167 --- /dev/null +++ b/venv/lib/python3.13/site-packages/transformers-4.57.2.dist-info/top_level.txt @@ -0,0 +1 @@ +transformers diff --git a/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/METADATA b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..b09cb50e1f9a42a8eb58a4339cbdb26250368375 --- /dev/null +++ b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/METADATA @@ -0,0 +1,72 @@ +Metadata-Version: 2.4 +Name: typing_extensions +Version: 4.15.0 +Summary: Backported and Experimental Type Hints for Python 3.9+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-Expression: PSF-2.0 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 
+Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development +License-File: LICENSE +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing-extensions.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) – +[PyPI](https://pypi.org/project/typing-extensions/) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on previous Python versions to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +`typing_extensions` is treated specially by static type checkers such as +mypy and pyright. Objects defined in `typing_extensions` are treated the same +way as equivalent forms in `typing`. + +`typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version will be incremented only for backwards-incompatible changes. +Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions ~=x.y`, +where `x.y` is the first version that includes all features you need. +[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release) +is equivalent to `typing_extensions >=x.y, <(x+1)`. Do not depend on `~= x.y.z` +unless you really know what you're doing; that defeats the purpose of +semantic versioning. + +## Included items + +See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a +complete listing of module contents. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md) +for how to contribute to `typing_extensions`. 
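+
+## Example
+
+A minimal sketch of the backport use case described above (assuming any
+supported Python, 3.9+): `TypeGuard` was added to `typing` in 3.10, but the
+same object imports from `typing_extensions` everywhere.
+
+    # Sketch only: on 3.10+ this import and typing.TypeGuard are equivalent.
+    from typing_extensions import TypeGuard
+
+    def is_str_list(values: list[object]) -> TypeGuard[list[str]]:
+        # Tells static checkers that a True result narrows to list[str].
+        return all(isinstance(v, str) for v in values)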
+ diff --git a/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/RECORD b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..c56d5f549245f5f04f212d4ef510c0ad5ef4e639 --- /dev/null +++ b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/typing_extensions.cpython-313.pyc,, +typing_extensions-4.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.15.0.dist-info/METADATA,sha256=wTg3j-jxiTSsmd4GBTXFPsbBOu7WXpTDJkHafuMZKnI,3259 +typing_extensions-4.15.0.dist-info/RECORD,, +typing_extensions-4.15.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +typing_extensions-4.15.0.dist-info/licenses/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +typing_extensions.py,sha256=Qz0R0XDTok0usGXrwb_oSM6n49fOaFZ6tSvqLUwvftg,160429 diff --git a/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/WHEEL b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d8b9936dad9ab2513fa6979f411560d3b6b57e37 --- /dev/null +++ b/venv/lib/python3.13/site-packages/typing_extensions-4.15.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.13/site-packages/urllib3/__init__.py b/venv/lib/python3.13/site-packages/urllib3/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3fe782c8a45bbabcf240f3cac4303ac12b0ec274 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/__init__.py @@ -0,0 +1,211 @@ +""" +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more +""" + +from __future__ import annotations + +# Set default logging handler to avoid "No handler found" warnings. +import logging +import sys +import typing +import warnings +from logging import NullHandler + +from . import exceptions +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +from .filepost import _TYPE_FIELDS, encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import BaseHTTPResponse, HTTPResponse +from .util.request import make_headers +from .util.retry import Retry +from .util.timeout import Timeout + +# Ensure that Python is compiled with OpenSSL 1.1.1+ +# If the 'ssl' module isn't available at all that's +# fine, we only care if the module is available. +try: + import ssl +except ImportError: + pass +else: + if not ssl.OPENSSL_VERSION.startswith("OpenSSL "): # Defensive: + warnings.warn( + "urllib3 v2 only supports OpenSSL 1.1.1+, currently " + f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. " + "See: https://github.com/urllib3/urllib3/issues/3020", + exceptions.NotOpenSSLWarning, + ) + elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1): # Defensive: + raise ImportError( + "urllib3 v2 only supports OpenSSL 1.1.1+, currently " + f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. 
" + "See: https://github.com/urllib3/urllib3/issues/2168" + ) + +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = __version__ + +__all__ = ( + "HTTPConnectionPool", + "HTTPHeaderDict", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "make_headers", + "proxy_from_url", + "request", + "BaseHTTPResponse", +) + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def add_stderr_logger( + level: int = logging.DEBUG, +) -> logging.StreamHandler[typing.TextIO]: + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug("Added a stderr logging handler to logger: %s", __name__) + return handler + + +# ... Clean up. +del NullHandler + + +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. +# SecurityWarning's always go off by default. +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) + + +def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None: + """ + Helper for quickly disabling all urllib3 warnings. + """ + warnings.simplefilter("ignore", category) + + +_DEFAULT_POOL = PoolManager() + + +def request( + method: str, + url: str, + *, + body: _TYPE_BODY | None = None, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + preload_content: bool | None = True, + decode_content: bool | None = True, + redirect: bool | None = True, + retries: Retry | bool | int | None = None, + timeout: Timeout | float | int | None = 3, + json: typing.Any | None = None, +) -> BaseHTTPResponse: + """ + A convenience, top-level request method. It uses a module-global ``PoolManager`` instance. + Therefore, its side effects could be shared across dependencies relying on it. + To avoid side effects create a new ``PoolManager`` instance and use it instead. + The method does not accept low-level ``**urlopen_kw`` keyword arguments. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. + + :param bool preload_content: + If True, the response's body will be preloaded into memory. + + :param bool decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. 
Disabling retries + will disable redirect, too. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param json: + Data to encode and send as JSON with UTF-encoded in the request body. + The ``"Content-Type"`` header will be set to ``"application/json"`` + unless specified otherwise. + """ + + return _DEFAULT_POOL.request( + method, + url, + body=body, + fields=fields, + headers=headers, + preload_content=preload_content, + decode_content=decode_content, + redirect=redirect, + retries=retries, + timeout=timeout, + json=json, + ) + + +if sys.platform == "emscripten": + from .contrib.emscripten import inject_into_urllib3 # noqa: 401 + + inject_into_urllib3() diff --git a/venv/lib/python3.13/site-packages/urllib3/_base_connection.py b/venv/lib/python3.13/site-packages/urllib3/_base_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..dc0f318c0b380926eed0f4209d395c79963eaf9e --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/_base_connection.py @@ -0,0 +1,165 @@ +from __future__ import annotations + +import typing + +from .util.connection import _TYPE_SOCKET_OPTIONS +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT +from .util.url import Url + +_TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str] + + +class ProxyConfig(typing.NamedTuple): + ssl_context: ssl.SSLContext | None + use_forwarding_for_https: bool + assert_hostname: None | str | typing.Literal[False] + assert_fingerprint: str | None + + +class _ResponseOptions(typing.NamedTuple): + # TODO: Remove this in favor of a better + # HTTP request/response lifecycle tracking. + request_method: str + request_url: str + preload_content: bool + decode_content: bool + enforce_content_length: bool + + +if typing.TYPE_CHECKING: + import ssl + from typing import Protocol + + from .response import BaseHTTPResponse + + class BaseHTTPConnection(Protocol): + default_port: typing.ClassVar[int] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + host: str + port: int + timeout: None | ( + float + ) # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved. + blocksize: int + source_address: tuple[str, int] | None + socket_options: _TYPE_SOCKET_OPTIONS | None + + proxy: Url | None + proxy_config: ProxyConfig | None + + is_verified: bool + proxy_is_verified: bool | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, + socket_options: _TYPE_SOCKET_OPTIONS | None = ..., + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: ... 
+ + def set_tunnel( + self, + host: str, + port: int | None = None, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: ... + + def connect(self) -> None: ... + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + # We know *at least* botocore is depending on the order of the + # first 3 parameters so to be safe we only mark the later ones + # as keyword-only to ensure we have space to extend. + *, + chunked: bool = False, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> None: ... + + def getresponse(self) -> BaseHTTPResponse: ... + + def close(self) -> None: ... + + @property + def is_closed(self) -> bool: + """Whether the connection either is brand new or has been previously closed. + If this property is True then both ``is_connected`` and ``has_connected_to_proxy`` + properties must be False. + """ + + @property + def is_connected(self) -> bool: + """Whether the connection is actively connected to any origin (proxy or target)""" + + @property + def has_connected_to_proxy(self) -> bool: + """Whether the connection has successfully connected to its proxy. + This returns False if no proxy is in use. Used to determine whether + errors are coming from the proxy layer or from tunnelling to the target origin. + """ + + class BaseHTTPSConnection(BaseHTTPConnection, Protocol): + default_port: typing.ClassVar[int] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + # Certificate verification methods + cert_reqs: int | str | None + assert_hostname: None | str | typing.Literal[False] + assert_fingerprint: str | None + ssl_context: ssl.SSLContext | None + + # Trusted CAs + ca_certs: str | None + ca_cert_dir: str | None + ca_cert_data: None | str | bytes + + # TLS version + ssl_minimum_version: int | None + ssl_maximum_version: int | None + ssl_version: int | str | None # Deprecated + + # Client certificates + cert_file: str | None + key_file: str | None + key_password: str | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: _TYPE_SOCKET_OPTIONS | None = ..., + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | typing.Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: ... 
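Both protocols above are declared only under typing.TYPE_CHECKING, so they add no runtime cost; concrete classes such as urllib3.connection.HTTPConnection are expected to satisfy them structurally. A minimal sketch of consuming the protocol, assuming postponed annotations so the TYPE_CHECKING-only import is legal in a signature:

    from __future__ import annotations

    import typing

    if typing.TYPE_CHECKING:
        # Needed only by the type checker; never imported at runtime.
        from urllib3._base_connection import BaseHTTPConnection

    def origin_of(conn: BaseHTTPConnection) -> str:
        # Any conforming connection exposes host, port and is_verified.
        return f"{conn.host}:{conn.port} (verified={conn.is_verified})"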
diff --git a/venv/lib/python3.13/site-packages/urllib3/_collections.py b/venv/lib/python3.13/site-packages/urllib3/_collections.py new file mode 100644 index 0000000000000000000000000000000000000000..1b6c1364213b990c716c39d7cc6427cb319cb948 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/_collections.py @@ -0,0 +1,479 @@ +from __future__ import annotations + +import typing +from collections import OrderedDict +from enum import Enum, auto +from threading import RLock + +if typing.TYPE_CHECKING: + # We can only import Protocol if TYPE_CHECKING because it's a development + # dependency, and is not available at runtime. + from typing import Protocol + + from typing_extensions import Self + + class HasGettableStringKeys(Protocol): + def keys(self) -> typing.Iterator[str]: ... + + def __getitem__(self, key: str) -> str: ... + + +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] + + +# Key type +_KT = typing.TypeVar("_KT") +# Value type +_VT = typing.TypeVar("_VT") +# Default type +_DT = typing.TypeVar("_DT") + +ValidHTTPHeaderSource = typing.Union[ + "HTTPHeaderDict", + typing.Mapping[str, str], + typing.Iterable[tuple[str, str]], + "HasGettableStringKeys", +] + + +class _Sentinel(Enum): + not_passed = auto() + + +def ensure_can_construct_http_header_dict( + potential: object, +) -> ValidHTTPHeaderSource | None: + if isinstance(potential, HTTPHeaderDict): + return potential + elif isinstance(potential, typing.Mapping): + # Full runtime checking of the contents of a Mapping is expensive, so for the + # purposes of typechecking, we assume that any Mapping is the right shape. + return typing.cast(typing.Mapping[str, str], potential) + elif isinstance(potential, typing.Iterable): + # Similarly to Mapping, full runtime checking of the contents of an Iterable is + # expensive, so for the purposes of typechecking, we assume that any Iterable + # is the right shape. + return typing.cast(typing.Iterable[tuple[str, str]], potential) + elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"): + return typing.cast("HasGettableStringKeys", potential) + else: + return None + + +class RecentlyUsedContainer(typing.Generic[_KT, _VT], typing.MutableMapping[_KT, _VT]): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + _container: typing.OrderedDict[_KT, _VT] + _maxsize: int + dispose_func: typing.Callable[[_VT], None] | None + lock: RLock + + def __init__( + self, + maxsize: int = 10, + dispose_func: typing.Callable[[_VT], None] | None = None, + ) -> None: + super().__init__() + self._maxsize = maxsize + self.dispose_func = dispose_func + self._container = OrderedDict() + self.lock = RLock() + + def __getitem__(self, key: _KT) -> _VT: + # Re-insert the item, moving it to the end of the eviction line. + with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key: _KT, value: _VT) -> None: + evicted_item = None + with self.lock: + # Possibly evict the existing value of 'key' + try: + # If the key exists, we'll overwrite it, which won't change the + # size of the pool. Because accessing a key should move it to + # the end of the eviction line, we pop it out first. 
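+                # Pop-then-reinsert is what keeps the OrderedDict ordered by
+                # recency: re-inserting moves the key to the end, and
+                # popitem(last=False) below evicts the least recently used
+                # entry from the front.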
+ evicted_item = key, self._container.pop(key) + self._container[key] = value + except KeyError: + # When the key does not exist, we insert the value first so that + # evicting works in all cases, including when self._maxsize is 0 + self._container[key] = value + if len(self._container) > self._maxsize: + # If we didn't evict an existing value, and we've hit our maximum + # size, then we have to evict the least recently used item from + # the beginning of the container. + evicted_item = self._container.popitem(last=False) + + # After releasing the lock on the pool, dispose of any evicted value. + if evicted_item is not None and self.dispose_func: + _, evicted_value = evicted_item + self.dispose_func(evicted_value) + + def __delitem__(self, key: _KT) -> None: + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self) -> int: + with self.lock: + return len(self._container) + + def __iter__(self) -> typing.NoReturn: + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) + + def clear(self) -> None: + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(self._container.values()) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self) -> set[_KT]: # type: ignore[override] + with self.lock: + return set(self._container.keys()) + + +class HTTPHeaderDictItemView(set[tuple[str, str]]): + """ + HTTPHeaderDict is unusual for a Mapping[str, str] in that it has two modes of + address. + + If we directly try to get an item with a particular name, we will get a string + back that is the concatenated version of all the values: + + >>> d['X-Header-Name'] + 'Value1, Value2, Value3' + + However, if we iterate over an HTTPHeaderDict's items, we will optionally combine + these values based on whether combine=True was called when building up the dictionary + + >>> d = HTTPHeaderDict({"A": "1", "B": "foo"}) + >>> d.add("A", "2", combine=True) + >>> d.add("B", "bar") + >>> list(d.items()) + [ + ('A', '1, 2'), + ('B', 'foo'), + ('B', 'bar'), + ] + + This class conforms to the interface required by the MutableMapping ABC while + also giving us the nonstandard iteration behavior we want; items with duplicate + keys, ordered by time of first insertion. + """ + + _headers: HTTPHeaderDict + + def __init__(self, headers: HTTPHeaderDict) -> None: + self._headers = headers + + def __len__(self) -> int: + return len(list(self._headers.iteritems())) + + def __iter__(self) -> typing.Iterator[tuple[str, str]]: + return self._headers.iteritems() + + def __contains__(self, item: object) -> bool: + if isinstance(item, tuple) and len(item) == 2: + passed_key, passed_val = item + if isinstance(passed_key, str) and isinstance(passed_val, str): + return self._headers._has_value_for_header(passed_key, passed_val) + return False + + +class HTTPHeaderDict(typing.MutableMapping[str, str]): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. 
+ + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. + + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + _container: typing.MutableMapping[str, list[str]] + + def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str): + super().__init__() + self._container = {} # 'dict' is insert-ordered + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key: str, val: str) -> None: + # avoid a bytes/str comparison by decoding before httplib + if isinstance(key, bytes): + key = key.decode("latin-1") + self._container[key.lower()] = [key, val] + + def __getitem__(self, key: str) -> str: + val = self._container[key.lower()] + return ", ".join(val[1:]) + + def __delitem__(self, key: str) -> None: + del self._container[key.lower()] + + def __contains__(self, key: object) -> bool: + if isinstance(key, str): + return key.lower() in self._container + return False + + def setdefault(self, key: str, default: str = "") -> str: + return super().setdefault(key, default) + + def __eq__(self, other: object) -> bool: + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return False + else: + other_as_http_header_dict = type(self)(maybe_constructable) + + return {k.lower(): v for k, v in self.itermerged()} == { + k.lower(): v for k, v in other_as_http_header_dict.itermerged() + } + + def __ne__(self, other: object) -> bool: + return not self.__eq__(other) + + def __len__(self) -> int: + return len(self._container) + + def __iter__(self) -> typing.Iterator[str]: + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def discard(self, key: str) -> None: + try: + del self[key] + except KeyError: + pass + + def add(self, key: str, val: str, *, combine: bool = False) -> None: + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + If this is called with combine=True, instead of adding a new header value + as a distinct item during iteration, this will instead append the value to + any existing header value with a comma. If no existing header value exists + for the key, then the value will simply be added, ignoring the combine parameter. 
+ + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + >>> list(headers.items()) + [('foo', 'bar'), ('foo', 'baz')] + >>> headers.add('foo', 'quz', combine=True) + >>> list(headers.items()) + [('foo', 'bar, baz, quz')] + """ + # avoid a bytes/str comparison by decoding before httplib + if isinstance(key, bytes): + key = key.decode("latin-1") + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case aka no item present as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + # if there are values here, then there is at least the initial + # key/value pair + assert len(vals) >= 2 + if combine: + vals[-1] = vals[-1] + ", " + val + else: + vals.append(val) + + def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None: + """Generic import function for any type of header-like object. + Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__ + """ + if len(args) > 1: + raise TypeError( + f"extend() takes at most 1 positional arguments ({len(args)} given)" + ) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, typing.Mapping): + for key, val in other.items(): + self.add(key, val) + elif isinstance(other, typing.Iterable): + other = typing.cast(typing.Iterable[tuple[str, str]], other) + for key, value in other: + self.add(key, value) + elif hasattr(other, "keys") and hasattr(other, "__getitem__"): + # THIS IS NOT A TYPESAFE BRANCH + # In this branch, the object has a `keys` attr but is not a Mapping or any of + # the other types indicated in the method signature. We do some stuff with + # it as though it partially implements the Mapping interface, but we're not + # doing that stuff safely AT ALL. + for key in other.keys(): + self.add(key, other[key]) + + for key, value in kwargs.items(): + self.add(key, value) + + @typing.overload + def getlist(self, key: str) -> list[str]: ... + + @typing.overload + def getlist(self, key: str, default: _DT) -> list[str] | _DT: ... + + def getlist( + self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed + ) -> list[str] | _DT: + """Returns a list of all the values for the named field. Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is _Sentinel.not_passed: + # _DT is unbound; empty list is instance of List[str] + return [] + # _DT is bound; default is instance of _DT + return default + else: + # _DT may or may not be bound; vals[1:] is instance of List[str], which + # meets our external interface requirement of `Union[List[str], _DT]`. + return vals[1:] + + def _prepare_for_method_change(self) -> Self: + """ + Remove content-specific header fields before changing the request + method to GET or HEAD according to RFC 9110, Section 15.4. 
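+
+        Returns ``self`` (the dict is modified in place) so the call can be
+        chained.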
+ """ + content_specific_headers = [ + "Content-Encoding", + "Content-Language", + "Content-Location", + "Content-Type", + "Content-Length", + "Digest", + "Last-Modified", + ] + for header in content_specific_headers: + self.discard(header) + return self + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self) -> str: + return f"{type(self).__name__}({dict(self.itermerged())})" + + def _copy_from(self, other: HTTPHeaderDict) -> None: + for key in other: + val = other.getlist(key) + self._container[key.lower()] = [key, *val] + + def copy(self) -> Self: + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self) -> typing.Iterator[tuple[str, str]]: + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self) -> typing.Iterator[tuple[str, str]]: + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ", ".join(val[1:]) + + def items(self) -> HTTPHeaderDictItemView: # type: ignore[override] + return HTTPHeaderDictItemView(self) + + def _has_value_for_header(self, header_name: str, potential_value: str) -> bool: + if header_name in self: + return potential_value in self._container[header_name.lower()][1:] + return False + + def __ior__(self, other: object) -> HTTPHeaderDict: + # Supports extending a header dict in-place using operator |= + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + self.extend(maybe_constructable) + return self + + def __or__(self, other: object) -> Self: + # Supports merging header dicts using operator | + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + result = self.copy() + result.extend(maybe_constructable) + return result + + def __ror__(self, other: object) -> Self: + # Supports merging header dicts using operator | when other is on left side + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + result = type(self)(maybe_constructable) + result.extend(self) + return result diff --git a/venv/lib/python3.13/site-packages/urllib3/_request_methods.py b/venv/lib/python3.13/site-packages/urllib3/_request_methods.py new file mode 100644 index 0000000000000000000000000000000000000000..297c271bf401c1cb48c6225f8822e78f58c3ca56 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/_request_methods.py @@ -0,0 +1,278 @@ +from __future__ import annotations + +import json as _json +import typing +from urllib.parse import urlencode + +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from .filepost import _TYPE_FIELDS, encode_multipart_formdata +from .response import BaseHTTPResponse + +__all__ = ["RequestMethods"] + +_TYPE_ENCODE_URL_FIELDS = typing.Union[ + typing.Sequence[tuple[str, typing.Union[str, bytes]]], + typing.Mapping[str, typing.Union[str, bytes]], +] + + +class RequestMethods: + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as 
:class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} + + def __init__(self, headers: typing.Mapping[str, str] | None = None) -> None: + self.headers = headers or {} + + def urlopen( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + encode_multipart: bool = True, + multipart_boundary: str | None = None, + **kw: typing.Any, + ) -> BaseHTTPResponse: # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." + ) + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + json: typing.Any | None = None, + **urlopen_kw: typing.Any, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param fields: + Data to encode and send in the URL or request body, depending on ``method``. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param json: + Data to encode and send as JSON with UTF-encoded in the request body. + The ``"Content-Type"`` header will be set to ``"application/json"`` + unless specified otherwise. 
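+
+        A short usage sketch (``PoolManager`` mixes in this class, so the
+        same entry point backs ``urllib3.PoolManager().request``)::
+
+            import urllib3
+
+            http = urllib3.PoolManager()
+            # GET is a URL-encoded method, so ``fields`` lands in the URL.
+            r = http.request("GET", "https://example.com/search", fields={"q": "x"})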
+ """ + method = method.upper() + + if json is not None and body is not None: + raise TypeError( + "request got values for both 'body' and 'json' parameters which are mutually exclusive" + ) + + if json is not None: + if headers is None: + headers = self.headers + + if not ("content-type" in map(str.lower, headers.keys())): + headers = HTTPHeaderDict(headers) + headers["Content-Type"] = "application/json" + + body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode( + "utf-8" + ) + + if body is not None: + urlopen_kw["body"] = body + + if method in self._encode_url_methods: + return self.request_encode_url( + method, + url, + fields=fields, # type: ignore[arg-type] + headers=headers, + **urlopen_kw, + ) + else: + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + + def request_encode_url( + self, + method: str, + url: str, + fields: _TYPE_ENCODE_URL_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + **urlopen_kw: str, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param fields: + Data to encode and send in the URL. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + """ + if headers is None: + headers = self.headers + + extra_kw: dict[str, typing.Any] = {"headers": headers} + extra_kw.update(urlopen_kw) + + if fields: + url += "?" + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body( + self, + method: str, + url: str, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + encode_multipart: bool = True, + multipart_boundary: str | None = None, + **urlopen_kw: str, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :func:`urllib3.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :func:`urllib.parse.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. 
The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param encode_multipart: + If True, encode the ``fields`` using the multipart/form-data MIME + format. + + :param multipart_boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + if headers is None: + headers = self.headers + + extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)} + body: bytes | str + + if fields: + if "body" in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one." + ) + + if encode_multipart: + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) + else: + body, content_type = ( + urlencode(fields), # type: ignore[arg-type] + "application/x-www-form-urlencoded", + ) + + extra_kw["body"] = body + extra_kw["headers"].setdefault("Content-Type", content_type) + + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/venv/lib/python3.13/site-packages/urllib3/_version.py b/venv/lib/python3.13/site-packages/urllib3/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..49707ce59783e1f6c31fc722c949232506af7dda --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/_version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '2.5.0' +__version_tuple__ = version_tuple = (2, 5, 0) diff --git a/venv/lib/python3.13/site-packages/urllib3/connection.py b/venv/lib/python3.13/site-packages/urllib3/connection.py new file mode 100644 index 0000000000000000000000000000000000000000..8082387d94b6559ca2a24126ac1511285389dc6f --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/connection.py @@ -0,0 +1,1093 @@ +from __future__ import annotations + +import datetime +import http.client +import logging +import os +import re +import socket +import sys +import threading +import typing +import warnings +from http.client import HTTPConnection as _HTTPConnection +from http.client import HTTPException as HTTPException # noqa: F401 +from http.client import ResponseNotReady +from socket import timeout as SocketTimeout + +if typing.TYPE_CHECKING: + from .response import HTTPResponse + from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT + from .util.ssltransport import SSLTransport + +from ._collections import HTTPHeaderDict +from .http2 import probe as http2_probe +from .util.response import assert_header_parsing +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout +from .util.util import to_str +from .util.wait import wait_for_read + +try: # Compiled with SSL? 
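+    # If the interpreter was built without the ssl module, the except branch
+    # below sets ssl to None and defines a dummy BaseSSLError so that code
+    # catching BaseSSLError elsewhere in this module keeps working.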
+ import ssl
+
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError):
+ ssl = None # type: ignore[assignment]
+
+ class BaseSSLError(BaseException): # type: ignore[no-redef]
+ pass
+
+
+from ._base_connection import _TYPE_BODY
+from ._base_connection import ProxyConfig as ProxyConfig
+from ._base_connection import _ResponseOptions as _ResponseOptions
+from ._version import __version__
+from .exceptions import (
+ ConnectTimeoutError,
+ HeaderParsingError,
+ NameResolutionError,
+ NewConnectionError,
+ ProxyError,
+ SystemTimeWarning,
+)
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
+from .util.request import body_to_chunks
+from .util.ssl_ import assert_fingerprint as _assert_fingerprint
+from .util.ssl_ import (
+ create_urllib3_context,
+ is_ipaddress,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .util.ssl_match_hostname import CertificateError, match_hostname
+from .util.url import Url
+
+# Not a no-op, we're adding this to the namespace so it can be imported.
+ConnectionError = ConnectionError
+BrokenPipeError = BrokenPipeError
+
+
+log = logging.getLogger(__name__)
+
+port_by_scheme = {"http": 80, "https": 443}
+
+# When it comes time to update this value as a part of regular maintenance
+# (i.e. test_recent_date is failing) update it to ~6 months before the current date.
+RECENT_DATE = datetime.date(2025, 1, 1)
+
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+
+
+class HTTPConnection(_HTTPConnection):
+ """
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``source_address``: Set the source address for the current connection.
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass:
+
+ .. code-block:: python
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port: typing.ClassVar[int] = port_by_scheme["http"] # type: ignore[misc]
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [
+ (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ ]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified: bool = False
+
+ #: Whether this proxy connection verified the proxy host's certificate.
+ # If no proxy is currently connected to, the value will be ``None``.
+ proxy_is_verified: bool | None = None + + blocksize: int + source_address: tuple[str, int] | None + socket_options: connection._TYPE_SOCKET_OPTIONS | None + + _has_connected_to_proxy: bool + _response_options: _ResponseOptions | None + _tunnel_host: str | None + _tunnel_port: int | None + _tunnel_scheme: str | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None | ( + connection._TYPE_SOCKET_OPTIONS + ) = default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: + super().__init__( + host=host, + port=port, + timeout=Timeout.resolve_default_timeout(timeout), + source_address=source_address, + blocksize=blocksize, + ) + self.socket_options = socket_options + self.proxy = proxy + self.proxy_config = proxy_config + + self._has_connected_to_proxy = False + self._response_options = None + self._tunnel_host: str | None = None + self._tunnel_port: int | None = None + self._tunnel_scheme: str | None = None + + @property + def host(self) -> str: + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip(".") + + @host.setter + def host(self, value: str) -> None: + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self) -> socket.socket: + """Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + try: + sock = connection.create_connection( + (self._dns_host, self.port), + self.timeout, + source_address=self.source_address, + socket_options=self.socket_options, + ) + except socket.gaierror as e: + raise NameResolutionError(self.host, self, e) from e + except SocketTimeout as e: + raise ConnectTimeoutError( + self, + f"Connection to {self.host} timed out. 
(connect timeout={self.timeout})", + ) from e + + except OSError as e: + raise NewConnectionError( + self, f"Failed to establish a new connection: {e}" + ) from e + + sys.audit("http.client.connect", self, self.host, self.port) + + return sock + + def set_tunnel( + self, + host: str, + port: int | None = None, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: + if scheme not in ("http", "https"): + raise ValueError( + f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'" + ) + super().set_tunnel(host, port=port, headers=headers) + self._tunnel_scheme = scheme + + if sys.version_info < (3, 11, 9) or ((3, 12) <= sys.version_info < (3, 12, 3)): + # Taken from python/cpython#100986 which was backported in 3.11.9 and 3.12.3. + # When using connection_from_host, host will come without brackets. + def _wrap_ipv6(self, ip: bytes) -> bytes: + if b":" in ip and ip[0] != b"["[0]: + return b"[" + ip + b"]" + return ip + + if sys.version_info < (3, 11, 9): + # `_tunnel` copied from 3.11.13 backporting + # https://github.com/python/cpython/commit/0d4026432591d43185568dd31cef6a034c4b9261 + # and https://github.com/python/cpython/commit/6fbc61070fda2ffb8889e77e3b24bca4249ab4d1 + def _tunnel(self) -> None: + _MAXLINE = http.client._MAXLINE # type: ignore[attr-defined] + connect = b"CONNECT %s:%d HTTP/1.0\r\n" % ( # type: ignore[str-format] + self._wrap_ipv6(self._tunnel_host.encode("ascii")), # type: ignore[union-attr] + self._tunnel_port, + ) + headers = [connect] + for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined] + headers.append(f"{header}: {value}\r\n".encode("latin-1")) + headers.append(b"\r\n") + # Making a single send() call instead of one per line encourages + # the host OS to use a more optimal packet size instead of + # potentially emitting a series of small packets. + self.send(b"".join(headers)) + del headers + + response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined] + try: + (version, code, message) = response._read_status() # type: ignore[attr-defined] + + if code != http.HTTPStatus.OK: + self.close() + raise OSError( + f"Tunnel connection failed: {code} {message.strip()}" + ) + while True: + line = response.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise http.client.LineTooLong("header line") + if not line: + # for sites which EOF without sending a trailer + break + if line in (b"\r\n", b"\n", b""): + break + + if self.debuglevel > 0: + print("header:", line.decode()) + finally: + response.close() + + elif (3, 12) <= sys.version_info < (3, 12, 3): + # `_tunnel` copied from 3.12.11 backporting + # https://github.com/python/cpython/commit/23aef575c7629abcd4aaf028ebd226fb41a4b3c8 + def _tunnel(self) -> None: # noqa: F811 + connect = b"CONNECT %s:%d HTTP/1.1\r\n" % ( # type: ignore[str-format] + self._wrap_ipv6(self._tunnel_host.encode("idna")), # type: ignore[union-attr] + self._tunnel_port, + ) + headers = [connect] + for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined] + headers.append(f"{header}: {value}\r\n".encode("latin-1")) + headers.append(b"\r\n") + # Making a single send() call instead of one per line encourages + # the host OS to use a more optimal packet size instead of + # potentially emitting a series of small packets. 
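+ # As a sketch, a hypothetical tunnel to example.com:443 sends a
+ # preamble roughly like the following (the exact header set depends
+ # on self._tunnel_headers):
+ #
+ #     CONNECT example.com:443 HTTP/1.1\r\n
+ #     Host: example.com:443\r\n
+ #     \r\n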
+ self.send(b"".join(headers)) + del headers + + response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined] + try: + (version, code, message) = response._read_status() # type: ignore[attr-defined] + + self._raw_proxy_headers = http.client._read_headers(response.fp) # type: ignore[attr-defined] + + if self.debuglevel > 0: + for header in self._raw_proxy_headers: + print("header:", header.decode()) + + if code != http.HTTPStatus.OK: + self.close() + raise OSError( + f"Tunnel connection failed: {code} {message.strip()}" + ) + + finally: + response.close() + + def connect(self) -> None: + self.sock = self._new_conn() + if self._tunnel_host: + # If we're tunneling it means we're connected to our proxy. + self._has_connected_to_proxy = True + + # TODO: Fix tunnel so it doesn't depend on self.sock state. + self._tunnel() + + # If there's a proxy to be connected to we are fully connected. + # This is set twice (once above and here) due to forwarding proxies + # not using tunnelling. + self._has_connected_to_proxy = bool(self.proxy) + + if self._has_connected_to_proxy: + self.proxy_is_verified = False + + @property + def is_closed(self) -> bool: + return self.sock is None + + @property + def is_connected(self) -> bool: + if self.sock is None: + return False + return not wait_for_read(self.sock, timeout=0.0) + + @property + def has_connected_to_proxy(self) -> bool: + return self._has_connected_to_proxy + + @property + def proxy_is_forwarding(self) -> bool: + """ + Return True if a forwarding proxy is configured, else return False + """ + return bool(self.proxy) and self._tunnel_host is None + + @property + def proxy_is_tunneling(self) -> bool: + """ + Return True if a tunneling proxy is configured, else return False + """ + return self._tunnel_host is not None + + def close(self) -> None: + try: + super().close() + finally: + # Reset all stateful properties so connection + # can be re-used without leaking prior configs. + self.sock = None + self.is_verified = False + self.proxy_is_verified = None + self._has_connected_to_proxy = False + self._response_options = None + self._tunnel_host = None + self._tunnel_port = None + self._tunnel_scheme = None + + def putrequest( + self, + method: str, + url: str, + skip_host: bool = False, + skip_accept_encoding: bool = False, + ) -> None: + """""" + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})" + ) + + return super().putrequest( + method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding + ) + + def putheader(self, header: str, *values: str) -> None: # type: ignore[override] + """""" + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + super().putheader(header, *values) + elif to_str(header.lower()) not in SKIPPABLE_HEADERS: + skippable_headers = "', '".join( + [str.title(header) for header in sorted(SKIPPABLE_HEADERS)] + ) + raise ValueError( + f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'" + ) + + # `request` method's signature intentionally violates LSP. + # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental. 
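+ # A minimal sketch of driving this low-level interface directly
+ # (hypothetical host; most code should use a connection pool instead):
+ #
+ #     conn = HTTPConnection("example.com", port=80)
+ #     conn.request("GET", "/")
+ #     response = conn.getresponse()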
+ def request( # type: ignore[override]
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ *,
+ chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> None:
+ # Update the inner socket's timeout value to send the request.
+ # This only triggers if the connection is re-used.
+ if self.sock is not None:
+ self.sock.settimeout(self.timeout)
+
+ # Store these values to be fed into the HTTPResponse
+ # object later. TODO: Remove this in favor of a real
+ # HTTP lifecycle mechanism.
+
+ # We have to store these before we call .request()
+ # because sometimes we can still salvage a response
+ # off the wire even if we aren't able to completely
+ # send the request body.
+ self._response_options = _ResponseOptions(
+ request_method=method,
+ request_url=url,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
+
+ if headers is None:
+ headers = {}
+ header_keys = frozenset(to_str(k.lower()) for k in headers)
+ skip_accept_encoding = "accept-encoding" in header_keys
+ skip_host = "host" in header_keys
+ self.putrequest(
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+ )
+
+ # Transform the body into an iterable of sendall()-able chunks
+ # and detect if an explicit Content-Length is doable.
+ chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize)
+ chunks = chunks_and_cl.chunks
+ content_length = chunks_and_cl.content_length
+
+ # When chunked is explicitly set to 'True' we respect that.
+ if chunked:
+ if "transfer-encoding" not in header_keys:
+ self.putheader("Transfer-Encoding", "chunked")
+ else:
+ # Detect whether a framing mechanism is already in use. If so
+ # we respect that value, otherwise we pick chunked vs content-length
+ # depending on the type of 'body'.
+ if "content-length" in header_keys:
+ chunked = False
+ elif "transfer-encoding" in header_keys:
+ chunked = True
+
+ # Otherwise we go off the recommendation of 'body_to_chunks()'.
+ else:
+ chunked = False
+ if content_length is None:
+ if chunks is not None:
+ chunked = True
+ self.putheader("Transfer-Encoding", "chunked")
+ else:
+ self.putheader("Content-Length", str(content_length))
+
+ # Now that framing headers are out of the way we send all the other headers.
+ if "user-agent" not in header_keys:
+ self.putheader("User-Agent", _get_default_user_agent())
+ for header, value in headers.items():
+ self.putheader(header, value)
+ self.endheaders()
+
+ # If we're given a body we start sending that in chunks.
+ if chunks is not None:
+ for chunk in chunks:
+ # Sending empty chunks isn't allowed for TE: chunked
+ # as it indicates the end of the body.
+ if not chunk:
+ continue
+ if isinstance(chunk, str):
+ chunk = chunk.encode("utf-8")
+ if chunked:
+ self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk))
+ else:
+ self.send(chunk)
+
+ # Regardless of whether we have a body or not, if we're in
+ # chunked mode we want to send an explicit empty chunk.
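+ # For example, a body chunk b"hello" is framed above as
+ # b"5\r\nhello\r\n"; the b"0\r\n\r\n" below marks the end of the body.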
+ if chunked:
+ self.send(b"0\r\n\r\n")
+
+ def request_chunked(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ ) -> None:
+ """
+ Alternative to the common request method, which sends the
+ body with chunked encoding and not as one block.
+ """
+ warnings.warn(
+ "HTTPConnection.request_chunked() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+ self.request(method, url, body=body, headers=headers, chunked=True)
+
+ def getresponse( # type: ignore[override]
+ self,
+ ) -> HTTPResponse:
+ """
+ Get the response from the server.
+
+ If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable.
+
+ If a request has not been sent or if a previous response has not been handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed.
+ """
+ # Raise the same error as http.client.HTTPConnection
+ if self._response_options is None:
+ raise ResponseNotReady()
+
+ # Reset this attribute so it can be used again.
+ resp_options = self._response_options
+ self._response_options = None
+
+ # Since the connection's timeout value may have been updated
+ # we need to set the timeout on the socket.
+ self.sock.settimeout(self.timeout)
+
+ # This is needed here to avoid circular import errors
+ from .response import HTTPResponse
+
+ # Save a reference to the shutdown function before ownership is passed
+ # to httplib_response
+ # TODO should we implement it everywhere?
+ _shutdown = getattr(self.sock, "shutdown", None)
+
+ # Get the response from http.client.HTTPConnection
+ httplib_response = super().getresponse()
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe:
+ log.warning(
+ "Failed to parse headers (url=%s): %s",
+ _url_from_connection(self, resp_options.request_url),
+ hpe,
+ exc_info=True,
+ )
+
+ headers = HTTPHeaderDict(httplib_response.msg.items())
+
+ response = HTTPResponse(
+ body=httplib_response,
+ headers=headers,
+ status=httplib_response.status,
+ version=httplib_response.version,
+ version_string=getattr(self, "_http_vsn_str", "HTTP/?"),
+ reason=httplib_response.reason,
+ preload_content=resp_options.preload_content,
+ decode_content=resp_options.decode_content,
+ original_response=httplib_response,
+ enforce_content_length=resp_options.enforce_content_length,
+ request_method=resp_options.request_method,
+ request_url=resp_options.request_url,
+ sock_shutdown=_shutdown,
+ )
+ return response
+
+
+class HTTPSConnection(HTTPConnection):
+ """
+ Many of the parameters to this constructor are passed to the underlying SSL
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
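+
+ A minimal sketch (host and CA bundle path are illustrative):
+
+ .. code-block:: python
+
+ conn = HTTPSConnection("example.com", port=443, ca_certs="/path/to/ca.pem")
+ conn.request("GET", "/")
+ response = conn.getresponse()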
+ """ + + default_port = port_by_scheme["https"] # type: ignore[misc] + + cert_reqs: int | str | None = None + ca_certs: str | None = None + ca_cert_dir: str | None = None + ca_cert_data: None | str | bytes = None + ssl_version: int | str | None = None + ssl_minimum_version: int | None = None + ssl_maximum_version: int | None = None + assert_fingerprint: str | None = None + _connect_callback: typing.Callable[..., None] | None = None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None | ( + connection._TYPE_SOCKET_OPTIONS + ) = HTTPConnection.default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | typing.Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: + super().__init__( + host, + port=port, + timeout=timeout, + source_address=source_address, + blocksize=blocksize, + socket_options=socket_options, + proxy=proxy, + proxy_config=proxy_config, + ) + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + # cert_reqs depends on ssl_context so calculate last. + if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + self.cert_reqs = cert_reqs + self._connect_callback = None + + def set_cert( + self, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + assert_hostname: None | str | typing.Literal[False] = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ) -> None: + """ + This method should only be called once, before the connection is used. + """ + warnings.warn( + "HTTPSConnection.set_cert() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead provide the parameters to the " + "HTTPSConnection constructor.", + category=DeprecationWarning, + stacklevel=2, + ) + + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. 
+ if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + def connect(self) -> None: + # Today we don't need to be doing this step before the /actual/ socket + # connection, however in the future we'll need to decide whether to + # create a new socket or re-use an existing "shared" socket as a part + # of the HTTP/2 handshake dance. + if self._tunnel_host is not None and self._tunnel_port is not None: + probe_http2_host = self._tunnel_host + probe_http2_port = self._tunnel_port + else: + probe_http2_host = self.host + probe_http2_port = self.port + + # Check if the target origin supports HTTP/2. + # If the value comes back as 'None' it means that the current thread + # is probing for HTTP/2 support. Otherwise, we're waiting for another + # probe to complete, or we get a value right away. + target_supports_http2: bool | None + if "h2" in ssl_.ALPN_PROTOCOLS: + target_supports_http2 = http2_probe.acquire_and_get( + host=probe_http2_host, port=probe_http2_port + ) + else: + # If HTTP/2 isn't going to be offered it doesn't matter if + # the target supports HTTP/2. Don't want to make a probe. + target_supports_http2 = False + + if self._connect_callback is not None: + self._connect_callback( + "before connect", + thread_id=threading.get_ident(), + target_supports_http2=target_supports_http2, + ) + + try: + sock: socket.socket | ssl.SSLSocket + self.sock = sock = self._new_conn() + server_hostname: str = self.host + tls_in_tls = False + + # Do we need to establish a tunnel? + if self.proxy_is_tunneling: + # We're tunneling to an HTTPS origin so need to do TLS-in-TLS. + if self._tunnel_scheme == "https": + # _connect_tls_proxy will verify and assign proxy_is_verified + self.sock = sock = self._connect_tls_proxy(self.host, sock) + tls_in_tls = True + elif self._tunnel_scheme == "http": + self.proxy_is_verified = False + + # If we're tunneling it means we're connected to our proxy. + self._has_connected_to_proxy = True + + self._tunnel() + # Override the host with the one we're requesting data from. + server_hostname = typing.cast(str, self._tunnel_host) + + if self.server_hostname is not None: + server_hostname = self.server_hostname + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn( + ( + f"System time is way off (before {RECENT_DATE}). This will probably " + "lead to SSL verification errors" + ), + SystemTimeWarning, + ) + + # Remove trailing '.' 
from fqdn hostnames to allow certificate validation + server_hostname_rm_dot = server_hostname.rstrip(".") + + sock_and_verified = _ssl_wrap_socket_and_match_hostname( + sock=sock, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + server_hostname=server_hostname_rm_dot, + ssl_context=self.ssl_context, + tls_in_tls=tls_in_tls, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ) + self.sock = sock_and_verified.socket + + # If an error occurs during connection/handshake we may need to release + # our lock so another connection can probe the origin. + except BaseException: + if self._connect_callback is not None: + self._connect_callback( + "after connect failure", + thread_id=threading.get_ident(), + target_supports_http2=target_supports_http2, + ) + + if target_supports_http2 is None: + http2_probe.set_and_release( + host=probe_http2_host, port=probe_http2_port, supports_http2=None + ) + raise + + # If this connection doesn't know if the origin supports HTTP/2 + # we report back to the HTTP/2 probe our result. + if target_supports_http2 is None: + supports_http2 = sock_and_verified.socket.selected_alpn_protocol() == "h2" + http2_probe.set_and_release( + host=probe_http2_host, + port=probe_http2_port, + supports_http2=supports_http2, + ) + + # Forwarding proxies can never have a verified target since + # the proxy is the one doing the verification. Should instead + # use a CONNECT tunnel in order to verify the target. + # See: https://github.com/urllib3/urllib3/issues/3267. + if self.proxy_is_forwarding: + self.is_verified = False + else: + self.is_verified = sock_and_verified.is_verified + + # If there's a proxy to be connected to we are fully connected. + # This is set twice (once above and here) due to forwarding proxies + # not using tunnelling. + self._has_connected_to_proxy = bool(self.proxy) + + # Set `self.proxy_is_verified` unless it's already set while + # establishing a tunnel. + if self._has_connected_to_proxy and self.proxy_is_verified is None: + self.proxy_is_verified = sock_and_verified.is_verified + + def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket: + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + # `_connect_tls_proxy` is called when self._tunnel_host is truthy. 
+ proxy_config = typing.cast(ProxyConfig, self.proxy_config) + ssl_context = proxy_config.ssl_context + sock_and_verified = _ssl_wrap_socket_and_match_hostname( + sock, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + assert_hostname=proxy_config.assert_hostname, + assert_fingerprint=proxy_config.assert_fingerprint, + # Features that aren't implemented for proxies yet: + cert_file=None, + key_file=None, + key_password=None, + tls_in_tls=False, + ) + self.proxy_is_verified = sock_and_verified.is_verified + return sock_and_verified.socket # type: ignore[return-value] + + +class _WrappedAndVerifiedSocket(typing.NamedTuple): + """ + Wrapped socket and whether the connection is + verified after the TLS handshake + """ + + socket: ssl.SSLSocket | SSLTransport + is_verified: bool + + +def _ssl_wrap_socket_and_match_hostname( + sock: socket.socket, + *, + cert_reqs: None | str | int, + ssl_version: None | str | int, + ssl_minimum_version: int | None, + ssl_maximum_version: int | None, + cert_file: str | None, + key_file: str | None, + key_password: str | None, + ca_certs: str | None, + ca_cert_dir: str | None, + ca_cert_data: None | str | bytes, + assert_hostname: None | str | typing.Literal[False], + assert_fingerprint: str | None, + server_hostname: str | None, + ssl_context: ssl.SSLContext | None, + tls_in_tls: bool = False, +) -> _WrappedAndVerifiedSocket: + """Logic for constructing an SSLContext from all TLS parameters, passing + that down into ssl_wrap_socket, and then doing certificate verification + either via hostname or fingerprint. This function exists to guarantee + that both proxies and targets have the same behavior when connecting via TLS. + """ + default_ssl_context = False + if ssl_context is None: + default_ssl_context = True + context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + ssl_minimum_version=ssl_minimum_version, + ssl_maximum_version=ssl_maximum_version, + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + else: + context = ssl_context + + context.verify_mode = resolve_cert_reqs(cert_reqs) + + # In some cases, we want to verify hostnames ourselves + if ( + # `ssl` can't verify fingerprints or alternate hostnames + assert_fingerprint + or assert_hostname + # assert_hostname can be set to False to disable hostname checking + or assert_hostname is False + # We still support OpenSSL 1.0.2, which prevents us from verifying + # hostnames easily: https://github.com/pyca/pyopenssl/pull/933 + or ssl_.IS_PYOPENSSL + or not ssl_.HAS_NEVER_CHECK_COMMON_NAME + ): + context.check_hostname = False + + # Try to load OS default certs if none are given. We need to do the hasattr() check + # for custom pyOpenSSL SSLContext objects because they don't support + # load_default_certs(). + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + + # Ensure that IPv6 addresses are in the proper format and don't have a + # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses + # and interprets them as DNS hostnames. 
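+ # For example, a hypothetical scoped address "[fe80::1%eth0]" is
+ # reduced to "fe80::1" before the handshake below.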
+ if server_hostname is not None: + normalized = server_hostname.strip("[]") + if "%" in normalized: + normalized = normalized[: normalized.rfind("%")] + if is_ipaddress(normalized): + server_hostname = normalized + + ssl_sock = ssl_wrap_socket( + sock=sock, + keyfile=key_file, + certfile=cert_file, + key_password=key_password, + ca_certs=ca_certs, + ca_cert_dir=ca_cert_dir, + ca_cert_data=ca_cert_data, + server_hostname=server_hostname, + ssl_context=context, + tls_in_tls=tls_in_tls, + ) + + try: + if assert_fingerprint: + _assert_fingerprint( + ssl_sock.getpeercert(binary_form=True), assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not context.check_hostname + and assert_hostname is not False + ): + cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment] + + # Need to signal to our match_hostname whether to use 'commonName' or not. + # If we're using our own constructed SSLContext we explicitly set 'False' + # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name. + if default_ssl_context: + hostname_checks_common_name = False + else: + hostname_checks_common_name = ( + getattr(context, "hostname_checks_common_name", False) or False + ) + + _match_hostname( + cert, + assert_hostname or server_hostname, # type: ignore[arg-type] + hostname_checks_common_name, + ) + + return _WrappedAndVerifiedSocket( + socket=ssl_sock, + is_verified=context.verify_mode == ssl.CERT_REQUIRED + or bool(assert_fingerprint), + ) + except BaseException: + ssl_sock.close() + raise + + +def _match_hostname( + cert: _TYPE_PEER_CERT_RET_DICT | None, + asserted_hostname: str, + hostname_checks_common_name: bool = False, +) -> None: + # Our upstream implementation of ssl.match_hostname() + # only applies this normalization to IP addresses so it doesn't + # match DNS SANs so we do the same thing! + stripped_hostname = asserted_hostname.strip("[]") + if is_ipaddress(stripped_hostname): + asserted_hostname = stripped_hostname + + try: + match_hostname(cert, asserted_hostname, hostname_checks_common_name) + except CertificateError as e: + log.warning( + "Certificate did not match expected hostname: %s. Certificate: %s", + asserted_hostname, + cert, + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert # type: ignore[attr-defined] + raise + + +def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError: + # Look for the phrase 'wrong version number', if found + # then we should warn the user that we're very sure that + # this proxy is HTTP-only and they have a configuration issue. + error_normalized = " ".join(re.split("[^a-z]", str(err).lower())) + is_likely_http_proxy = ( + "wrong version number" in error_normalized + or "unknown protocol" in error_normalized + or "record layer failure" in error_normalized + ) + http_proxy_warning = ( + ". Your proxy appears to only use HTTP and not HTTPS, " + "try changing your proxy URL to be HTTP. 
See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#https-proxy-error-http-proxy" + ) + new_err = ProxyError( + f"Unable to connect to proxy" + f"{http_proxy_warning if is_likely_http_proxy and proxy_scheme == 'https' else ''}", + err, + ) + new_err.__cause__ = err + return new_err + + +def _get_default_user_agent() -> str: + return f"python-urllib3/{__version__}" + + +class DummyConnection: + """Used to detect a failed ConnectionCls import.""" + + +if not ssl: + HTTPSConnection = DummyConnection # type: ignore[misc, assignment] # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection + + +def _url_from_connection( + conn: HTTPConnection | HTTPSConnection, path: str | None = None +) -> str: + """Returns the URL from a given connection. This is mainly used for testing and logging.""" + + scheme = "https" if isinstance(conn, HTTPSConnection) else "http" + + return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url diff --git a/venv/lib/python3.13/site-packages/urllib3/connectionpool.py b/venv/lib/python3.13/site-packages/urllib3/connectionpool.py new file mode 100644 index 0000000000000000000000000000000000000000..3a0685b4cdd0562e508b9dd032765b5c759ea61e --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/connectionpool.py @@ -0,0 +1,1178 @@ +from __future__ import annotations + +import errno +import logging +import queue +import sys +import typing +import warnings +import weakref +from socket import timeout as SocketTimeout +from types import TracebackType + +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from ._request_methods import RequestMethods +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + ProxyConfig, + _wrap_proxy_error, +) +from .connection import port_by_scheme as port_by_scheme +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + FullPoolError, + HostChangedError, + InsecureRequestWarning, + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, +) +from .response import BaseHTTPResponse +from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.request import _TYPE_BODY_POSITION, set_file_position +from .util.retry import Retry +from .util.ssl_match_hostname import CertificateError +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import parse_url +from .util.util import to_str + +if typing.TYPE_CHECKING: + import ssl + + from typing_extensions import Self + + from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection + +log = logging.getLogger(__name__) + +_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None] + + +# Pool objects +class ConnectionPool: + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. 
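+
+ A short usage sketch with the HTTP subclass (host is illustrative)::
+
+ with HTTPConnectionPool("example.com") as pool:
+ response = pool.request("GET", "/")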
+ """ + + scheme: str | None = None + QueueCls = queue.LifoQueue + + def __init__(self, host: str, port: int | None = None) -> None: + if not host: + raise LocationValueError("No host specified.") + + self.host = _normalize_host(host, scheme=self.scheme) + self.port = port + + # This property uses 'normalize_host()' (not '_normalize_host()') + # to avoid removing square braces around IPv6 addresses. + # This value is sent to `HTTPConnection.set_tunnel()` if called + # because square braces are required for HTTP CONNECT tunneling. + self._tunnel_host = normalize_host(host, scheme=self.scheme).lower() + + def __str__(self) -> str: + return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})" + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> typing.Literal[False]: + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self) -> None: + """ + Close all pooled connections and disable the pool. + """ + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`http.client.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`http.client.HTTPConnection`. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.ProxyManager` + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.ProxyManager` + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. 
+ """ + + scheme = "http" + ConnectionCls: type[BaseHTTPConnection] | type[BaseHTTPSConnection] = HTTPConnection + + def __init__( + self, + host: str, + port: int | None = None, + timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, + maxsize: int = 1, + block: bool = False, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + _proxy: Url | None = None, + _proxy_headers: typing.Mapping[str, str] | None = None, + _proxy_config: ProxyConfig | None = None, + **conn_kw: typing.Any, + ): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config + + # Fill the queue up so that doing get() on it will block properly + for _ in range(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault("socket_options", []) + + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + + # Do not pass 'self' as callback to 'finalize'. + # Then the 'finalize' would keep an endless living (leak) to self. + # By just passing a reference to the pool allows the garbage collector + # to free self if nobody else has a reference to it. + pool = self.pool + + # Close all the HTTPConnections in the pool before the + # HTTPConnectionPool object is garbage collected. + weakref.finalize(self, _close_pool_connections, pool) + + def _new_conn(self) -> BaseHTTPConnection: + """ + Return a fresh :class:`HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + **self.conn_kw, + ) + return conn + + def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. 
+ """ + conn = None + + if self.pool is None: + raise ClosedPoolError(self, "Pool is closed.") + + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: + + except queue.Empty: + if self.block: + raise EmptyPoolError( + self, + "Pool is empty and a new connection can't be opened due to blocking mode.", + ) from None + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + + return conn or self._new_conn() + + def _put_conn(self, conn: BaseHTTPConnection | None) -> None: + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + if self.pool is not None: + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + if self.block: + # This should never happen if you got the conn from self._get_conn + raise FullPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) from None + + log.warning( + "Connection pool is full, discarding connection: %s. Connection pool size: %s", + self.host, + self.pool.qsize(), + ) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn: BaseHTTPConnection) -> None: + """ + Called right before a request is made, after the socket is created. + """ + + def _prepare_proxy(self, conn: BaseHTTPConnection) -> None: + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: + """Helper that always returns a :class:`urllib3.util.Timeout`""" + if timeout is _DEFAULT_TIMEOUT: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout( + self, + err: BaseSSLError | OSError | SocketTimeout, + url: str, + timeout_value: _TYPE_TIMEOUT | None, + ) -> None: + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError( + self, url, f"Read timed out. (read timeout={timeout_value})" + ) from err + + # See the above comment about EAGAIN in Python 3. + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, f"Read timed out. 
(read timeout={timeout_value})" + ) from err + + def _make_request( + self, + conn: BaseHTTPConnection, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | None = None, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + chunked: bool = False, + response_conn: BaseHTTPConnection | None = None, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> BaseHTTPResponse: + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param response_conn: + Set this to ``None`` if you will handle releasing the connection or + set the connection to have the response release it. + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) + + try: + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # _validate_conn() starts the connection to an HTTPS proxy + # so we need to wrap errors with 'ProxyError' here too. 
+ except (
+ OSError,
+ NewConnectionError,
+ TimeoutError,
+ BaseSSLError,
+ CertificateError,
+ SSLError,
+ ) as e:
+ new_e: Exception = e
+ if isinstance(e, (BaseSSLError, CertificateError)):
+ new_e = SSLError(e)
+ # If the connection didn't successfully connect to its proxy,
+ # wrap the error in a ProxyError so callers can tell the failure
+ # happened while reaching the proxy.
+ if isinstance(
+ new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
+ ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
+ new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
+ raise new_e
+
+ # conn.request() calls http.client.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ try:
+ conn.request(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ chunked=chunked,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
+
+ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+ # legitimately able to close the connection after sending a valid response.
+ # With this behaviour, the received response is still readable.
+ except BrokenPipeError:
+ pass
+ except OSError as e:
+ # MacOS/Linux
+ # EPROTOTYPE and ECONNRESET are needed on macOS
+ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+ # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
+ if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
+ raise
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ if not conn.is_closed:
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, f"Read timed out. (read timeout={read_timeout})"
+ )
+ conn.timeout = read_timeout
+
+ # Receive the response from the server
+ try:
+ response = conn.getresponse()
+ except (BaseSSLError, OSError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # Set properties that are used by the pooling layer.
+ response.retries = retries
+ response._connection = response_conn # type: ignore[attr-defined]
+ response._pool = self # type: ignore[attr-defined]
+
+ log.debug(
+ '%s://%s:%s "%s %s %s" %s %s',
+ self.scheme,
+ self.host,
+ self.port,
+ method,
+ url,
+ response.version_string,
+ response.status,
+ response.length_remaining,
+ )
+
+ return response
+
+ def close(self) -> None:
+ """
+ Close all pooled connections and disable the pool.
+ """
+ if self.pool is None:
+ return
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ # Close all the HTTPConnections in the pool.
+ _close_pool_connections(old_pool)
+
+ def is_same_host(self, url: str) -> bool:
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith("/"):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
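+ # For example (illustrative): for a pool bound to http://example.com,
+ # "http://example.com/a" and "http://example.com:80/a" compare as the
+ # same host, while "https://example.com/a" does not.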
+ scheme, _, host, port, *_ = parse_url(url) + scheme = scheme or "http" + if host is not None: + host = _normalize_host(host, scheme=scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen( # type: ignore[override] + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + redirect: bool = True, + assert_same_host: bool = True, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + pool_timeout: int | None = None, + release_conn: bool | None = None, + chunked: bool = False, + body_pos: _TYPE_BODY_POSITION | None = None, + preload_content: bool = True, + decode_content: bool = True, + **response_kw: typing.Any, + ) -> BaseHTTPResponse: + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method + such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When ``False``, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. 
+ + :param bool preload_content: + If True, the response's body will be preloaded into memory. + + :param bool decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of ``preload_content`` + which defaults to ``True``. + + :param bool chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + """ + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = preload_content + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = to_str(_encode_target(url)) + else: + url = to_str(parsed_url.url) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] + release_this_conn = release_conn + + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. + if not http_tunnel_required: + headers = headers.copy() # type: ignore[attr-defined] + headers.update(self.proxy_headers) # type: ignore[union-attr] + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment] + + # Is this a closed/new connection that requires CONNECT tunnelling? 
+ if self.proxy is not None and http_tunnel_required and conn.is_closed: + try: + self._prepare_proxy(conn) + except (BaseSSLError, OSError, SocketTimeout) as e: + self._raise_timeout( + err=e, url=self.proxy.url, timeout_value=conn.timeout + ) + raise + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Make the request on the HTTPConnection object + response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + retries=retries, + response_conn=response_conn, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Everything went great! + clean_exit = True + + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise + + except ( + TimeoutError, + HTTPException, + OSError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ProxyError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + new_e: Exception = e + if isinstance(e, (BaseSSLError, CertificateError)): + new_e = SSLError(e) + if isinstance( + new_e, + ( + OSError, + NewConnectionError, + TimeoutError, + SSLError, + HTTPException, + ), + ) and (conn and conn.proxy and not conn.has_connected_to_proxy): + new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) + elif isinstance(new_e, (OSError, HTTPException)): + new_e = ProtocolError("Connection aborted.", new_e) + + retries = retries.increment( + method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + if conn: + conn.close() + conn = None + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. + method = "GET" + # And lose the body not to transfer anything sensitive. 
+ body = None + headers = HTTPHeaderDict(headers)._prepare_for_method_change() + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.headers.get("Retry-After")) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, + url, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. 
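+
+    Example (an illustrative sketch, not upstream documentation; the host and
+    certificate bundle path below are placeholders)::
+
+        >>> pool = HTTPSConnectionPool(
+        ...     "example.com",  # placeholder host
+        ...     port=443,
+        ...     cert_reqs="CERT_REQUIRED",
+        ...     ca_certs="/path/to/cacert.pem",  # placeholder CA bundle path
+        ... )
+        >>> response = pool.request("GET", "/")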
+ """ + + scheme = "https" + ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection + + def __init__( + self, + host: str, + port: int | None = None, + timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, + maxsize: int = 1, + block: bool = False, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + _proxy: Url | None = None, + _proxy_headers: typing.Mapping[str, str] | None = None, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + ssl_version: int | str | None = None, + ssl_minimum_version: ssl.TLSVersion | None = None, + ssl_maximum_version: ssl.TLSVersion | None = None, + assert_hostname: str | typing.Literal[False] | None = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + **conn_kw: typing.Any, + ) -> None: + super().__init__( + host, + port, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw, + ) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_proxy(self, conn: HTTPSConnection) -> None: # type: ignore[override] + """Establishes a tunnel connection through HTTP CONNECT.""" + if self.proxy and self.proxy.scheme == "https": + tunnel_scheme = "https" + else: + tunnel_scheme = "http" + + conn.set_tunnel( + scheme=tunnel_scheme, + host=self._tunnel_host, + port=self.port, + headers=self.proxy_headers, + ) + conn.connect() + + def _new_conn(self) -> BaseHTTPSConnection: + """ + Return a fresh :class:`urllib3.connection.HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: # type: ignore[comparison-overlap] + raise ImportError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) + + actual_host: str = self.host + actual_port = self.port + if self.proxy is not None and self.proxy.host is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + return self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + **self.conn_kw, + ) + + def _validate_conn(self, conn: BaseHTTPConnection) -> None: + """ + Called right before a request is made, after the socket is created. + """ + super()._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if conn.is_closed: + conn.connect() + + # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791 + if not conn.is_verified and not conn.proxy_is_verified: + warnings.warn( + ( + f"Unverified HTTPS request is being made to host '{conn.host}'. 
" + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#tls-warnings" + ), + InsecureRequestWarning, + ) + + +def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool: + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, _, host, port, *_ = parse_url(url) + scheme = scheme or "http" + port = port or port_by_scheme.get(scheme, 80) + if scheme == "https": + return HTTPSConnectionPool(host, port=port, **kw) # type: ignore[arg-type] + else: + return HTTPConnectionPool(host, port=port, **kw) # type: ignore[arg-type] + + +@typing.overload +def _normalize_host(host: None, scheme: str | None) -> None: ... + + +@typing.overload +def _normalize_host(host: str, scheme: str | None) -> str: ... + + +def _normalize_host(host: str | None, scheme: str | None) -> str | None: + """ + Normalize hosts for comparisons and use with sockets. + """ + + host = normalize_host(host, scheme) + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + if host and host.startswith("[") and host.endswith("]"): + host = host[1:-1] + return host + + +def _url_from_pool( + pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None +) -> str: + """Returns the URL from a given connection pool. This is mainly used for testing and logging.""" + return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url + + +def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None: + """Drains a queue of connections and closes each one.""" + try: + while True: + conn = pool.get(block=False) + if conn: + conn.close() + except queue.Empty: + pass # Done. 
diff --git a/venv/lib/python3.13/site-packages/urllib3/exceptions.py b/venv/lib/python3.13/site-packages/urllib3/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..a0de9d6cc4f0cc88123d696daa4fd095e1743ece --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/exceptions.py @@ -0,0 +1,335 @@ +from __future__ import annotations + +import socket +import typing +import warnings +from email.errors import MessageDefect +from http.client import IncompleteRead as httplib_IncompleteRead + +if typing.TYPE_CHECKING: + from .connection import HTTPConnection + from .connectionpool import ConnectionPool + from .response import HTTPResponse + from .util.retry import Retry + +# Base Exceptions + + +class HTTPError(Exception): + """Base exception used by this module.""" + + +class HTTPWarning(Warning): + """Base warning used by this module.""" + + +_TYPE_REDUCE_RESULT = tuple[typing.Callable[..., object], tuple[object, ...]] + + +class PoolError(HTTPError): + """Base exception for errors caused within a pool.""" + + def __init__(self, pool: ConnectionPool, message: str) -> None: + self.pool = pool + self._message = message + super().__init__(f"{pool}: {message}") + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, self._message) + + +class RequestError(PoolError): + """Base exception for PoolErrors that have associated URLs.""" + + def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: + self.url = url + super().__init__(pool, message) + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, self.url, self._message) + + +class SSLError(HTTPError): + """Raised when SSL certificate fails in an HTTPS connection.""" + + +class ProxyError(HTTPError): + """Raised when the connection to a proxy fails.""" + + # The original error is also available as __cause__. + original_error: Exception + + def __init__(self, message: str, error: Exception) -> None: + super().__init__(message, error) + self.original_error = error + + +class DecodeError(HTTPError): + """Raised when automatic decoding based on Content-Type fails.""" + + +class ProtocolError(HTTPError): + """Raised when something unexpected happens mid-request/response.""" + + +#: Renamed to ProtocolError but aliased for backwards compatibility. +ConnectionError = ProtocolError + + +# Leaf Exceptions + + +class MaxRetryError(RequestError): + """Raised when the maximum number of retries is exceeded. + + :param pool: The connection pool + :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` + :param str url: The requested Url + :param reason: The underlying error + :type reason: :class:`Exception` + + """ + + def __init__( + self, pool: ConnectionPool, url: str, reason: Exception | None = None + ) -> None: + self.reason = reason + + message = f"Max retries exceeded with url: {url} (Caused by {reason!r})" + + super().__init__(pool, url, message) + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. 
+ return self.__class__, (None, self.url, self.reason) + + +class HostChangedError(RequestError): + """Raised when an existing pool gets a request for a foreign host.""" + + def __init__( + self, pool: ConnectionPool, url: str, retries: Retry | int = 3 + ) -> None: + message = f"Tried to open a foreign host with url: {url}" + super().__init__(pool, url, message) + self.retries = retries + + +class TimeoutStateError(HTTPError): + """Raised when passing an invalid state to a timeout""" + + +class TimeoutError(HTTPError): + """Raised when a socket timeout error occurs. + + Catching this error will catch both :exc:`ReadTimeoutErrors + ` and :exc:`ConnectTimeoutErrors `. + """ + + +class ReadTimeoutError(TimeoutError, RequestError): + """Raised when a socket timeout occurs while receiving data from a server""" + + +# This timeout error does not have a URL attached and needs to inherit from the +# base HTTPError +class ConnectTimeoutError(TimeoutError): + """Raised when a socket timeout occurs while connecting to a server""" + + +class NewConnectionError(ConnectTimeoutError, HTTPError): + """Raised when we fail to establish a new connection. Usually ECONNREFUSED.""" + + def __init__(self, conn: HTTPConnection, message: str) -> None: + self.conn = conn + self._message = message + super().__init__(f"{conn}: {message}") + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, self._message) + + @property + def pool(self) -> HTTPConnection: + warnings.warn( + "The 'pool' property is deprecated and will be removed " + "in urllib3 v2.1.0. Use 'conn' instead.", + DeprecationWarning, + stacklevel=2, + ) + + return self.conn + + +class NameResolutionError(NewConnectionError): + """Raised when host name resolution fails.""" + + def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror): + message = f"Failed to resolve '{host}' ({reason})" + self._host = host + self._reason = reason + super().__init__(conn, message) + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. 
+ return self.__class__, (self._host, None, self._reason) + + +class EmptyPoolError(PoolError): + """Raised when a pool runs out of connections and no more are allowed.""" + + +class FullPoolError(PoolError): + """Raised when we try to add a connection to a full pool in blocking mode.""" + + +class ClosedPoolError(PoolError): + """Raised when a request enters a pool after the pool has been closed.""" + + +class LocationValueError(ValueError, HTTPError): + """Raised when there is something wrong with a given URL input.""" + + +class LocationParseError(LocationValueError): + """Raised when get_host or similar fails to parse the URL input.""" + + def __init__(self, location: str) -> None: + message = f"Failed to parse: {location}" + super().__init__(message) + + self.location = location + + +class URLSchemeUnknown(LocationValueError): + """Raised when a URL input has an unsupported scheme.""" + + def __init__(self, scheme: str): + message = f"Not supported URL scheme {scheme}" + super().__init__(message) + + self.scheme = scheme + + +class ResponseError(HTTPError): + """Used as a container for an error reason supplied in a MaxRetryError.""" + + GENERIC_ERROR = "too many error responses" + SPECIFIC_ERROR = "too many {status_code} error responses" + + +class SecurityWarning(HTTPWarning): + """Warned when performing security reducing actions""" + + +class InsecureRequestWarning(SecurityWarning): + """Warned when making an unverified HTTPS request.""" + + +class NotOpenSSLWarning(SecurityWarning): + """Warned when using unsupported SSL library""" + + +class SystemTimeWarning(SecurityWarning): + """Warned when system time is suspected to be wrong""" + + +class InsecurePlatformWarning(SecurityWarning): + """Warned when certain TLS/SSL configuration is not available on a platform.""" + + +class DependencyWarning(HTTPWarning): + """ + Warned when an attempt is made to import a module with missing optional + dependencies. + """ + + +class ResponseNotChunked(ProtocolError, ValueError): + """Response needs to be chunked in order to read it as chunks.""" + + +class BodyNotHttplibCompatible(HTTPError): + """ + Body should be :class:`http.client.HTTPResponse` like + (have an fp attribute which returns raw chunks) for read_chunked(). + """ + + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + """ + Response length doesn't match expected Content-Length + + Subclass of :class:`http.client.IncompleteRead` to allow int value + for ``partial`` to avoid creating large objects on streamed reads. 
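+
+    Example of the resulting ``repr`` (illustrative)::
+
+        >>> IncompleteRead(partial=512, expected=1024)
+        IncompleteRead(512 bytes read, 1024 more expected)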
+ """ + + partial: int # type: ignore[assignment] + expected: int + + def __init__(self, partial: int, expected: int) -> None: + self.partial = partial + self.expected = expected + + def __repr__(self) -> str: + return "IncompleteRead(%i bytes read, %i more expected)" % ( + self.partial, + self.expected, + ) + + +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + """Invalid chunk length in a chunked response.""" + + def __init__(self, response: HTTPResponse, length: bytes) -> None: + self.partial: int = response.tell() # type: ignore[assignment] + self.expected: int | None = response.length_remaining + self.response = response + self.length = length + + def __repr__(self) -> str: + return "InvalidChunkLength(got length %r, %i bytes read)" % ( + self.length, + self.partial, + ) + + +class InvalidHeader(HTTPError): + """The header provided was somehow invalid.""" + + +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + """ProxyManager does not support the supplied scheme""" + + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme: str | None) -> None: + # 'localhost' is here because our URL parser parses + # localhost:8080 -> scheme=localhost, remove if we fix this. + if scheme == "localhost": + scheme = None + if scheme is None: + message = "Proxy URL had no scheme, should start with http:// or https://" + else: + message = f"Proxy URL had unsupported scheme {scheme}, should use http:// or https://" + super().__init__(message) + + +class ProxySchemeUnsupported(ValueError): + """Fetching HTTPS resources through HTTPS proxies is unsupported""" + + +class HeaderParsingError(HTTPError): + """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" + + def __init__( + self, defects: list[MessageDefect], unparsed_data: bytes | str | None + ) -> None: + message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}" + super().__init__(message) + + +class UnrewindableBodyError(HTTPError): + """urllib3 encountered an error when trying to rewind a body""" diff --git a/venv/lib/python3.13/site-packages/urllib3/fields.py b/venv/lib/python3.13/site-packages/urllib3/fields.py new file mode 100644 index 0000000000000000000000000000000000000000..97c4730cff0df570e1ab47f77e6aa879ec3c36e7 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/fields.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import email.utils +import mimetypes +import typing + +_TYPE_FIELD_VALUE = typing.Union[str, bytes] +_TYPE_FIELD_VALUE_TUPLE = typing.Union[ + _TYPE_FIELD_VALUE, + tuple[str, _TYPE_FIELD_VALUE], + tuple[str, _TYPE_FIELD_VALUE, str], +] + + +def guess_content_type( + filename: str | None, default: str = "application/octet-stream" +) -> str: + """ + Guess the "Content-Type" of a file. + + :param filename: + The filename to guess the "Content-Type" of using :mod:`mimetypes`. + :param default: + If no "Content-Type" can be guessed, default to `default`. + """ + if filename: + return mimetypes.guess_type(filename)[0] or default + return default + + +def format_header_param_rfc2231(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Helper function to format and quote a single header parameter using the + strategy defined in RFC 2231. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows + `RFC 2388 Section 4.4 `_. + + :param name: + The name of the parameter, a string expected to be ASCII only. 
+ :param value: + The value of the parameter, provided as ``bytes`` or `str``. + :returns: + An RFC-2231-formatted unicode string. + + .. deprecated:: 2.0.0 + Will be removed in urllib3 v2.1.0. This is not valid for + ``multipart/form-data`` header parameters. + """ + import warnings + + warnings.warn( + "'format_header_param_rfc2231' is deprecated and will be " + "removed in urllib3 v2.1.0. This is not valid for " + "multipart/form-data header parameters.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(value, bytes): + value = value.decode("utf-8") + + if not any(ch in value for ch in '"\\\r\n'): + result = f'{name}="{value}"' + try: + result.encode("ascii") + except (UnicodeEncodeError, UnicodeDecodeError): + pass + else: + return result + + value = email.utils.encode_rfc2231(value, "utf-8") + value = f"{name}*={value}" + + return value + + +def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Format and quote a single multipart header parameter. + + This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching + the behavior of current browser and curl versions. Values are + assumed to be UTF-8. The ``\\n``, ``\\r``, and ``"`` characters are + percent encoded. + + .. _WHATWG HTML Standard: + https://html.spec.whatwg.org/multipage/ + form-control-infrastructure.html#multipart-form-data + + :param name: + The name of the parameter, an ASCII-only ``str``. + :param value: + The value of the parameter, a ``str`` or UTF-8 encoded + ``bytes``. + :returns: + A string ``name="value"`` with the escaped value. + + .. versionchanged:: 2.0.0 + Matches the WHATWG HTML Standard as of 2021/06/10. Control + characters are no longer percent encoded. + + .. versionchanged:: 2.0.0 + Renamed from ``format_header_param_html5`` and + ``format_header_param``. The old names will be removed in + urllib3 v2.1.0. + """ + if isinstance(value, bytes): + value = value.decode("utf-8") + + # percent encode \n \r " + value = value.translate({10: "%0A", 13: "%0D", 34: "%22"}) + return f'{name}="{value}"' + + +def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + .. deprecated:: 2.0.0 + Renamed to :func:`format_multipart_header_param`. Will be + removed in urllib3 v2.1.0. + """ + import warnings + + warnings.warn( + "'format_header_param_html5' has been renamed to " + "'format_multipart_header_param'. The old name will be " + "removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + return format_multipart_header_param(name, value) + + +def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + .. deprecated:: 2.0.0 + Renamed to :func:`format_multipart_header_param`. Will be + removed in urllib3 v2.1.0. + """ + import warnings + + warnings.warn( + "'format_header_param' has been renamed to " + "'format_multipart_header_param'. The old name will be " + "removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + return format_multipart_header_param(name, value) + + +class RequestField: + """ + A data container for request body parameters. + + :param name: + The name of this request field. Must be unicode. + :param data: + The data/value body. + :param filename: + An optional filename of the request field. Must be unicode. + :param headers: + An optional dict-like object of headers to initially use for the field. + + .. versionchanged:: 2.0.0 + The ``header_formatter`` parameter is deprecated and will + be removed in urllib3 v2.1.0. 
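+
+    Example (an illustrative sketch; the field name, contents, and filename
+    are placeholders)::
+
+        >>> field = RequestField("upload", b"file contents", filename="report.txt")
+        >>> field.make_multipart(content_type="text/plain")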
+ """ + + def __init__( + self, + name: str, + data: _TYPE_FIELD_VALUE, + filename: str | None = None, + headers: typing.Mapping[str, str] | None = None, + header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, + ): + self._name = name + self._filename = filename + self.data = data + self.headers: dict[str, str | None] = {} + if headers: + self.headers = dict(headers) + + if header_formatter is not None: + import warnings + + warnings.warn( + "The 'header_formatter' parameter is deprecated and " + "will be removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + self.header_formatter = header_formatter + else: + self.header_formatter = format_multipart_header_param + + @classmethod + def from_tuples( + cls, + fieldname: str, + value: _TYPE_FIELD_VALUE_TUPLE, + header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, + ) -> RequestField: + """ + A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. + + Supports constructing :class:`~urllib3.fields.RequestField` from + parameter of key/value strings AND key/filetuple. A filetuple is a + (filename, data, MIME type) tuple where the MIME type is optional. + For example:: + + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + + Field names and filenames must be unicode. + """ + filename: str | None + content_type: str | None + data: _TYPE_FIELD_VALUE + + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = guess_content_type(filename) + else: + filename = None + content_type = None + data = value + + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter + ) + request_param.make_multipart(content_type=content_type) + + return request_param + + def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Override this method to change how each multipart header + parameter is formatted. By default, this calls + :func:`format_multipart_header_param`. + + :param name: + The name of the parameter, an ASCII-only ``str``. + :param value: + The value of the parameter, a ``str`` or UTF-8 encoded + ``bytes``. + + :meta public: + """ + return self.header_formatter(name, value) + + def _render_parts( + self, + header_parts: ( + dict[str, _TYPE_FIELD_VALUE | None] + | typing.Sequence[tuple[str, _TYPE_FIELD_VALUE | None]] + ), + ) -> str: + """ + Helper function to format and quote a single header. + + Useful for single headers that are composed of multiple items. E.g., + 'Content-Disposition' fields. + + :param header_parts: + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. + """ + iterable: typing.Iterable[tuple[str, _TYPE_FIELD_VALUE | None]] + + parts = [] + if isinstance(header_parts, dict): + iterable = header_parts.items() + else: + iterable = header_parts + + for name, value in iterable: + if value is not None: + parts.append(self._render_part(name, value)) + + return "; ".join(parts) + + def render_headers(self) -> str: + """ + Renders the headers for this request field. 
+ """ + lines = [] + + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] + for sort_key in sort_keys: + if self.headers.get(sort_key, False): + lines.append(f"{sort_key}: {self.headers[sort_key]}") + + for header_name, header_value in self.headers.items(): + if header_name not in sort_keys: + if header_value: + lines.append(f"{header_name}: {header_value}") + + lines.append("\r\n") + return "\r\n".join(lines) + + def make_multipart( + self, + content_disposition: str | None = None, + content_type: str | None = None, + content_location: str | None = None, + ) -> None: + """ + Makes this request field into a multipart request field. + + This method overrides "Content-Disposition", "Content-Type" and + "Content-Location" headers to the request parameter. + + :param content_disposition: + The 'Content-Disposition' of the request body. Defaults to 'form-data' + :param content_type: + The 'Content-Type' of the request body. + :param content_location: + The 'Content-Location' of the request body. + + """ + content_disposition = (content_disposition or "form-data") + "; ".join( + [ + "", + self._render_parts( + (("name", self._name), ("filename", self._filename)) + ), + ] + ) + + self.headers["Content-Disposition"] = content_disposition + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/venv/lib/python3.13/site-packages/urllib3/filepost.py b/venv/lib/python3.13/site-packages/urllib3/filepost.py new file mode 100644 index 0000000000000000000000000000000000000000..14f70b05b4778f91137e4a9e7059d7514aa44d28 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/filepost.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +import binascii +import codecs +import os +import typing +from io import BytesIO + +from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField + +writer = codecs.lookup("utf-8")[3] + +_TYPE_FIELDS_SEQUENCE = typing.Sequence[ + typing.Union[tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField] +] +_TYPE_FIELDS = typing.Union[ + _TYPE_FIELDS_SEQUENCE, + typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE], +] + + +def choose_boundary() -> str: + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + return binascii.hexlify(os.urandom(16)).decode() + + +def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]: + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]] + + if isinstance(fields, typing.Mapping): + iterable = fields.items() + else: + iterable = fields + + for field in iterable: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def encode_multipart_formdata( + fields: _TYPE_FIELDS, boundary: str | None = None +) -> tuple[bytes, str]: + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + Values are processed by :func:`urllib3.fields.RequestField.from_tuples`. + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. 
+ """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(f"--{boundary}\r\n".encode("latin-1")) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, str): + writer(body).write(data) + else: + body.write(data) + + body.write(b"\r\n") + + body.write(f"--{boundary}--\r\n".encode("latin-1")) + + content_type = f"multipart/form-data; boundary={boundary}" + + return body.getvalue(), content_type diff --git a/venv/lib/python3.13/site-packages/urllib3/poolmanager.py b/venv/lib/python3.13/site-packages/urllib3/poolmanager.py new file mode 100644 index 0000000000000000000000000000000000000000..5763fea808184690042cca497b3ac358f412c83f --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/poolmanager.py @@ -0,0 +1,653 @@ +from __future__ import annotations + +import functools +import logging +import typing +import warnings +from types import TracebackType +from urllib.parse import urljoin + +from ._collections import HTTPHeaderDict, RecentlyUsedContainer +from ._request_methods import RequestMethods +from .connection import ProxyConfig +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + URLSchemeUnknown, +) +from .response import BaseHTTPResponse +from .util.connection import _TYPE_SOCKET_OPTIONS +from .util.proxy import connection_requires_http_tunnel +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import Url, parse_url + +if typing.TYPE_CHECKING: + import ssl + + from typing_extensions import Self + +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ca_cert_data", + "ssl_version", + "ssl_minimum_version", + "ssl_maximum_version", + "ca_cert_dir", + "ssl_context", + "key_password", + "server_hostname", +) +# Default value for `blocksize` - a new parameter introduced to +# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7 +_DEFAULT_BLOCKSIZE = 16384 + + +class PoolKey(typing.NamedTuple): + """ + All known keyword arguments that could be provided to the pool manager, its + pools, or the underlying connections. + + All custom key schemes should include the fields in this key at a minimum. 
+ """ + + key_scheme: str + key_host: str + key_port: int | None + key_timeout: Timeout | float | int | None + key_retries: Retry | bool | int | None + key_block: bool | None + key_source_address: tuple[str, int] | None + key_key_file: str | None + key_key_password: str | None + key_cert_file: str | None + key_cert_reqs: str | None + key_ca_certs: str | None + key_ca_cert_data: str | bytes | None + key_ssl_version: int | str | None + key_ssl_minimum_version: ssl.TLSVersion | None + key_ssl_maximum_version: ssl.TLSVersion | None + key_ca_cert_dir: str | None + key_ssl_context: ssl.SSLContext | None + key_maxsize: int | None + key_headers: frozenset[tuple[str, str]] | None + key__proxy: Url | None + key__proxy_headers: frozenset[tuple[str, str]] | None + key__proxy_config: ProxyConfig | None + key_socket_options: _TYPE_SOCKET_OPTIONS | None + key__socks_options: frozenset[tuple[str, str]] | None + key_assert_hostname: bool | str | None + key_assert_fingerprint: str | None + key_server_hostname: str | None + key_blocksize: int | None + + +def _default_key_normalizer( + key_class: type[PoolKey], request_context: dict[str, typing.Any] +) -> PoolKey: + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. + :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. + :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ("headers", "_proxy_headers", "_socks_options"): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get("socket_options") + if socket_opts is not None: + context["socket_options"] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context["key_" + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context + if context.get("key_blocksize") is None: + context["key_blocksize"] = _DEFAULT_BLOCKSIZE + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. +#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. 
+key_fn_by_scheme = { + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example: + + .. code-block:: python + + import urllib3 + + http = urllib3.PoolManager(num_pools=2) + + resp1 = http.request("GET", "https://google.com/") + resp2 = http.request("GET", "https://google.com/mail") + resp3 = http.request("GET", "https://yahoo.com/") + + print(len(http.pools)) + # 2 + + """ + + proxy: Url | None = None + proxy_config: ProxyConfig | None = None + + def __init__( + self, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + **connection_pool_kw: typing.Any, + ) -> None: + super().__init__(headers) + if "retries" in connection_pool_kw: + retries = connection_pool_kw["retries"] + if not isinstance(retries, Retry): + # When Retry is initialized, raise_on_redirect is based + # on a redirect boolean value. + # But requests made via a pool manager always set + # redirect to False, and raise_on_redirect always ends + # up being False consequently. + # Here we fix the issue by setting raise_on_redirect to + # a value needed by the pool manager without considering + # the redirect boolean. + raise_on_redirect = retries is not False + retries = Retry.from_int(retries, redirect=False) + retries.raise_on_redirect = raise_on_redirect + connection_pool_kw = connection_pool_kw.copy() + connection_pool_kw["retries"] = retries + self.connection_pool_kw = connection_pool_kw + + self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool] + self.pools = RecentlyUsedContainer(num_pools) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> typing.Literal[False]: + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool( + self, + scheme: str, + host: str, + port: int, + request_context: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + """ + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. + """ + pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly + # set to 'None' in the request_context. 
+ if request_context.get("blocksize") is None: + request_context["blocksize"] = _DEFAULT_BLOCKSIZE + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ("scheme", "host", "port"): + request_context.pop(key, None) + + if scheme == "http": + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self) -> None: + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host( + self, + host: str | None, + port: int | None = None, + scheme: str | None = "http", + pool_kwargs: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. + """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context["scheme"] = scheme or "http" + if not port: + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host + + return self.connection_from_context(request_context) + + def connection_from_context( + self, request_context: dict[str, typing.Any] + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + if "strict" in request_context: + warnings.warn( + "The 'strict' parameter is no longer needed on Python 3+. " + "This will raise an error in urllib3 v2.1.0.", + DeprecationWarning, + ) + request_context.pop("strict") + + scheme = request_context["scheme"].lower() + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key( + self, pool_key: PoolKey, request_context: dict[str, typing.Any] + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. 
+ pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url( + self, url: str, pool_kwargs: dict[str, typing.Any] | None = None + ) -> HTTPConnectionPool: + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` + is provided, it is used instead. Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. + """ + u = parse_url(url) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) + + def _merge_pool_kwargs( + self, override: dict[str, typing.Any] | None + ) -> dict[str, typing.Any]: + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. + """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool: + """ + Indicates if the proxy requires the complete destination URL in the + request. Normally this is only needed when not using an HTTP CONNECT + tunnel. + """ + if self.proxy is None: + return False + + return not connection_requires_http_tunnel( + self.proxy, self.proxy_config, parsed_url.scheme + ) + + def urlopen( # type: ignore[override] + self, method: str, url: str, redirect: bool = True, **kw: typing.Any + ) -> BaseHTTPResponse: + """ + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + + if u.scheme is None: + warnings.warn( + "URLs without a scheme (ie 'https://') are deprecated and will raise an error " + "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs " + "start with 'https://' or 'http://'. Read more in this issue: " + "https://github.com/urllib3/urllib3/issues/2920", + category=DeprecationWarning, + stacklevel=2, + ) + + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw["assert_same_host"] = False + kw["redirect"] = False + + if "headers" not in kw: + kw["headers"] = self.headers + + if self._proxy_requires_url_absolute_form(u): + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. 
+ method = "GET" + # And lose the body not to transfer anything sensitive. + kw["body"] = None + kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change() + + retries = kw.get("retries", response.retries) + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. + if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + new_headers = kw["headers"].copy() + for header in kw["headers"]: + if header.lower() in retries.remove_headers_on_redirect: + new_headers.pop(header, None) + kw["headers"] = new_headers + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + kw["retries"] = retries + kw["redirect"] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. + + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. + + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. + + :param proxy_assert_hostname: + The hostname of the certificate to verify against. + + :param proxy_assert_fingerprint: + The fingerprint of the certificate to verify against. + + Example: + + .. 
code-block:: python + + import urllib3 + + proxy = urllib3.ProxyManager("https://localhost:3128/") + + resp1 = proxy.request("GET", "https://google.com/") + resp2 = proxy.request("GET", "https://httpbin.org/") + + print(len(proxy.pools)) + # 1 + + resp3 = proxy.request("GET", "https://httpbin.org/") + resp4 = proxy.request("GET", "https://twitter.com/") + + print(len(proxy.pools)) + # 3 + + """ + + def __init__( + self, + proxy_url: str, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + proxy_headers: typing.Mapping[str, str] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + use_forwarding_for_https: bool = False, + proxy_assert_hostname: None | str | typing.Literal[False] = None, + proxy_assert_fingerprint: str | None = None, + **connection_pool_kw: typing.Any, + ) -> None: + if isinstance(proxy_url, HTTPConnectionPool): + str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}" + else: + str_proxy_url = proxy_url + proxy = parse_url(str_proxy_url) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig( + proxy_ssl_context, + use_forwarding_for_https, + proxy_assert_hostname, + proxy_assert_fingerprint, + ) + + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config + + super().__init__(num_pools, headers, **connection_pool_kw) + + def connection_from_host( + self, + host: str | None, + port: int | None = None, + scheme: str | None = "http", + pool_kwargs: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + if scheme == "https": + return super().connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs + ) + + return super().connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs # type: ignore[union-attr] + ) + + def _set_proxy_headers( + self, url: str, headers: typing.Mapping[str, str] | None = None + ) -> typing.Mapping[str, str]: + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {"Accept": "*/*"} + + netloc = parse_url(url).netloc + if netloc: + headers_["Host"] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen( # type: ignore[override] + self, method: str, url: str, redirect: bool = True, **kw: typing.Any + ) -> BaseHTTPResponse: + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. 
+ headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) + + return super().urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager: + return ProxyManager(proxy_url=url, **kw) diff --git a/venv/lib/python3.13/site-packages/urllib3/py.typed b/venv/lib/python3.13/site-packages/urllib3/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..5f3ea3d919363f08ab03edbc85b6099bc4df5647 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/py.typed @@ -0,0 +1,2 @@ +# Instruct type checkers to look for inline type annotations in this package. +# See PEP 561. diff --git a/venv/lib/python3.13/site-packages/urllib3/response.py b/venv/lib/python3.13/site-packages/urllib3/response.py new file mode 100644 index 0000000000000000000000000000000000000000..5632dab3b2f93df91bf132384c3820a6b5518f43 --- /dev/null +++ b/venv/lib/python3.13/site-packages/urllib3/response.py @@ -0,0 +1,1307 @@ +from __future__ import annotations + +import collections +import io +import json as _json +import logging +import re +import socket +import sys +import typing +import warnings +import zlib +from contextlib import contextmanager +from http.client import HTTPMessage as _HttplibHTTPMessage +from http.client import HTTPResponse as _HttplibHTTPResponse +from socket import timeout as SocketTimeout + +if typing.TYPE_CHECKING: + from ._base_connection import BaseHTTPConnection + +try: + try: + import brotlicffi as brotli # type: ignore[import-not-found] + except ImportError: + import brotli # type: ignore[import-not-found] +except ImportError: + brotli = None + +from . import util +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPConnection, HTTPException +from .exceptions import ( + BodyNotHttplibCompatible, + DecodeError, + HTTPError, + IncompleteRead, + InvalidChunkLength, + InvalidHeader, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, +) +from .util.response import is_fp_closed, is_response_to_head +from .util.retry import Retry + +if typing.TYPE_CHECKING: + from .connectionpool import HTTPConnectionPool + +log = logging.getLogger(__name__) + + +class ContentDecoder: + def decompress(self, data: bytes) -> bytes: + raise NotImplementedError() + + def flush(self) -> bytes: + raise NotImplementedError() + + +class DeflateDecoder(ContentDecoder): + def __init__(self) -> None: + self._first_try = True + self._data = b"" + self._obj = zlib.decompressobj() + + def decompress(self, data: bytes) -> bytes: + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None # type: ignore[assignment] + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None # type: ignore[assignment] + + def flush(self) -> bytes: + return self._obj.flush() + + +class GzipDecoderState: + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(ContentDecoder): + def __init__(self) -> None: + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def decompress(self, data: bytes) -> bytes: + ret = bytearray() + if self._state == GzipDecoderState.SWALLOW_DATA or not 
data: + return bytes(ret) + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return bytes(ret) + raise + data = self._obj.unused_data + if not data: + return bytes(ret) + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + def flush(self) -> bytes: + return self._obj.flush() + + +if brotli is not None: + + class BrotliDecoder(ContentDecoder): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + self._obj = brotli.Decompressor() + if hasattr(self._obj, "decompress"): + setattr(self, "decompress", self._obj.decompress) + else: + setattr(self, "decompress", self._obj.process) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return self._obj.flush() # type: ignore[no-any-return] + return b"" + + +try: + # Python 3.14+ + from compression import zstd # type: ignore[import-not-found] # noqa: F401 + + HAS_ZSTD = True + + class ZstdDecoder(ContentDecoder): + def __init__(self) -> None: + self._obj = zstd.ZstdDecompressor() + + def decompress(self, data: bytes) -> bytes: + if not data: + return b"" + data_parts = [self._obj.decompress(data)] + while self._obj.eof and self._obj.unused_data: + unused_data = self._obj.unused_data + self._obj = zstd.ZstdDecompressor() + data_parts.append(self._obj.decompress(unused_data)) + return b"".join(data_parts) + + def flush(self) -> bytes: + if not self._obj.eof: + raise DecodeError("Zstandard data is incomplete") + return b"" + +except ImportError: + try: + # Python 3.13 and earlier require the 'zstandard' module. + import zstandard as zstd + + # The package 'zstandard' added the 'eof' property starting + # in v0.18.0 which we require to ensure a complete and + # valid zstd stream was fed into the ZstdDecoder. + # See: https://github.com/urllib3/urllib3/pull/2624 + _zstd_version = tuple( + map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] + ) + if _zstd_version < (0, 18): # Defensive: + raise ImportError("zstandard module doesn't have eof") + except (AttributeError, ImportError, ValueError): # Defensive: + HAS_ZSTD = False + else: + HAS_ZSTD = True + + class ZstdDecoder(ContentDecoder): # type: ignore[no-redef] + def __init__(self) -> None: + self._obj = zstd.ZstdDecompressor().decompressobj() + + def decompress(self, data: bytes) -> bytes: + if not data: + return b"" + data_parts = [self._obj.decompress(data)] + while self._obj.eof and self._obj.unused_data: + unused_data = self._obj.unused_data + self._obj = zstd.ZstdDecompressor().decompressobj() + data_parts.append(self._obj.decompress(unused_data)) + return b"".join(data_parts) + + def flush(self) -> bytes: + ret = self._obj.flush() # note: this is a no-op + if not self._obj.eof: + raise DecodeError("Zstandard data is incomplete") + return ret # type: ignore[no-any-return] + + +class MultiDecoder(ContentDecoder): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. 
+ """ + + def __init__(self, modes: str) -> None: + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] + + def flush(self) -> bytes: + return self._decoders[0].flush() + + def decompress(self, data: bytes) -> bytes: + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + +def _get_decoder(mode: str) -> ContentDecoder: + if "," in mode: + return MultiDecoder(mode) + + # According to RFC 9110 section 8.4.1.3, recipients should + # consider x-gzip equivalent to gzip + if mode in ("gzip", "x-gzip"): + return GzipDecoder() + + if brotli is not None and mode == "br": + return BrotliDecoder() + + if HAS_ZSTD and mode == "zstd": + return ZstdDecoder() + + return DeflateDecoder() + + +class BytesQueueBuffer: + """Memory-efficient bytes buffer + + To return decoded data in read() and still follow the BufferedIOBase API, we need a + buffer to always return the correct amount of bytes. + + This buffer should be filled using calls to put() + + Our maximum memory usage is determined by the sum of the size of: + + * self.buffer, which contains the full data + * the largest chunk that we will copy in get() + + The worst case scenario is a single chunk, in which case we'll make a full copy of + the data inside get(). + """ + + def __init__(self) -> None: + self.buffer: typing.Deque[bytes] = collections.deque() + self._size: int = 0 + + def __len__(self) -> int: + return self._size + + def put(self, data: bytes) -> None: + self.buffer.append(data) + self._size += len(data) + + def get(self, n: int) -> bytes: + if n == 0: + return b"" + elif not self.buffer: + raise RuntimeError("buffer is empty") + elif n < 0: + raise ValueError("n should be > 0") + + fetched = 0 + ret = io.BytesIO() + while fetched < n: + remaining = n - fetched + chunk = self.buffer.popleft() + chunk_length = len(chunk) + if remaining < chunk_length: + left_chunk, right_chunk = chunk[:remaining], chunk[remaining:] + ret.write(left_chunk) + self.buffer.appendleft(right_chunk) + self._size -= remaining + break + else: + ret.write(chunk) + self._size -= chunk_length + fetched += chunk_length + + if not self.buffer: + break + + return ret.getvalue() + + def get_all(self) -> bytes: + buffer = self.buffer + if not buffer: + assert self._size == 0 + return b"" + if len(buffer) == 1: + result = buffer.pop() + else: + ret = io.BytesIO() + ret.writelines(buffer.popleft() for _ in range(len(buffer))) + result = ret.getvalue() + self._size = 0 + return result + + +class BaseHTTPResponse(io.IOBase): + CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] + if HAS_ZSTD: + CONTENT_DECODERS += ["zstd"] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + DECODER_ERROR_CLASSES: tuple[type[Exception], ...] 
= (IOError, zlib.error)
+    if brotli is not None:
+        DECODER_ERROR_CLASSES += (brotli.error,)
+
+    if HAS_ZSTD:
+        DECODER_ERROR_CLASSES += (zstd.ZstdError,)
+
+    def __init__(
+        self,
+        *,
+        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
+        status: int,
+        version: int,
+        version_string: str,
+        reason: str | None,
+        decode_content: bool,
+        request_url: str | None,
+        retries: Retry | None = None,
+    ) -> None:
+        if isinstance(headers, HTTPHeaderDict):
+            self.headers = headers
+        else:
+            self.headers = HTTPHeaderDict(headers)  # type: ignore[arg-type]
+        self.status = status
+        self.version = version
+        self.version_string = version_string
+        self.reason = reason
+        self.decode_content = decode_content
+        self._has_decoded_content = False
+        self._request_url: str | None = request_url
+        self.retries = retries
+
+        self.chunked = False
+        tr_enc = self.headers.get("transfer-encoding", "").lower()
+        # Don't incur the penalty of creating a list and then discarding it
+        encodings = (enc.strip() for enc in tr_enc.split(","))
+        if "chunked" in encodings:
+            self.chunked = True
+
+        self._decoder: ContentDecoder | None = None
+        self.length_remaining: int | None
+
+    def get_redirect_location(self) -> str | None | typing.Literal[False]:
+        """
+        Should we redirect and where to?
+
+        :returns: Truthy redirect location string if we got a redirect status
+            code and valid location. ``None`` if redirect status and no
+            location. ``False`` if not a redirect status code.
+        """
+        if self.status in self.REDIRECT_STATUSES:
+            return self.headers.get("location")
+        return False
+
+    @property
+    def data(self) -> bytes:
+        raise NotImplementedError()
+
+    def json(self) -> typing.Any:
+        """
+        Deserializes the body of the HTTP response as a Python object.
+
+        The body of the HTTP response must be encoded using UTF-8, as per
+        `RFC 8259 Section 8.1 `_.
+
+        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
+        your custom decoder instead.
+
+        If the body of the HTTP response is not decodable to UTF-8, a
+        `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
+        valid JSON document, a `json.JSONDecodeError` will be raised.
+
+        Read more :ref:`here `.
+
+        :returns: The body of the HTTP response as a Python object.
+        """
+        data = self.data.decode("utf-8")
+        return _json.loads(data)
+
+    @property
+    def url(self) -> str | None:
+        raise NotImplementedError()
+
+    @url.setter
+    def url(self, url: str | None) -> None:
+        raise NotImplementedError()
+
+    @property
+    def connection(self) -> BaseHTTPConnection | None:
+        raise NotImplementedError()
+
+    @property
+    def retries(self) -> Retry | None:
+        return self._retries
+
+    @retries.setter
+    def retries(self, retries: Retry | None) -> None:
+        # Override the request_url if retries has a redirect location.
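+        # retries.history[-1] records the most recent redirect, so its
+        # redirect_location is the URL this response should report.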
+ if retries is not None and retries.history: + self.url = retries.history[-1].redirect_location + self._retries = retries + + def stream( + self, amt: int | None = 2**16, decode_content: bool | None = None + ) -> typing.Iterator[bytes]: + raise NotImplementedError() + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, + cache_content: bool = False, + ) -> bytes: + raise NotImplementedError() + + def read1( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> bytes: + raise NotImplementedError() + + def read_chunked( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> typing.Iterator[bytes]: + raise NotImplementedError() + + def release_conn(self) -> None: + raise NotImplementedError() + + def drain_conn(self) -> None: + raise NotImplementedError() + + def shutdown(self) -> None: + raise NotImplementedError() + + def close(self) -> None: + raise NotImplementedError() + + def _init_decoder(self) -> None: + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get("content-encoding", "").lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] + if encodings: + self._decoder = _get_decoder(content_encoding) + + def _decode( + self, data: bytes, decode_content: bool | None, flush_decoder: bool + ) -> bytes: + """ + Decode the data passed in and potentially flush the decoder. + """ + if not decode_content: + if self._has_decoded_content: + raise RuntimeError( + "Calling read(decode_content=False) is not supported after " + "read(decode_content=True) was called." + ) + return data + + try: + if self._decoder: + data = self._decoder.decompress(data) + self._has_decoded_content = True + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, + e, + ) from e + if flush_decoder: + data += self._flush_decoder() + + return data + + def _flush_decoder(self) -> bytes: + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + return self._decoder.decompress(b"") + self._decoder.flush() + return b"" + + # Compatibility methods for `io` module + def readinto(self, b: bytearray) -> int: + temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[: len(temp)] = temp + return len(temp) + + # Compatibility methods for http.client.HTTPResponse + def getheaders(self) -> HTTPHeaderDict: + warnings.warn( + "HTTPResponse.getheaders() is deprecated and will be removed " + "in urllib3 v2.6.0. Instead access HTTPResponse.headers directly.", + category=DeprecationWarning, + stacklevel=2, + ) + return self.headers + + def getheader(self, name: str, default: str | None = None) -> str | None: + warnings.warn( + "HTTPResponse.getheader() is deprecated and will be removed " + "in urllib3 v2.6.0. 
Instead use HTTPResponse.headers.get(name, default).", + category=DeprecationWarning, + stacklevel=2, + ) + return self.headers.get(name, default) + + # Compatibility method for http.cookiejar + def info(self) -> HTTPHeaderDict: + return self.headers + + def geturl(self) -> str | None: + return self.url + + +class HTTPResponse(BaseHTTPResponse): + """ + HTTP Response container. + + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + + def __init__( + self, + body: _TYPE_BODY = "", + headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, + status: int = 0, + version: int = 0, + version_string: str = "HTTP/?", + reason: str | None = None, + preload_content: bool = True, + decode_content: bool = True, + original_response: _HttplibHTTPResponse | None = None, + pool: HTTPConnectionPool | None = None, + connection: HTTPConnection | None = None, + msg: _HttplibHTTPMessage | None = None, + retries: Retry | None = None, + enforce_content_length: bool = True, + request_method: str | None = None, + request_url: str | None = None, + auto_close: bool = True, + sock_shutdown: typing.Callable[[int], None] | None = None, + ) -> None: + super().__init__( + headers=headers, + status=status, + version=version, + version_string=version_string, + reason=reason, + decode_content=decode_content, + request_url=request_url, + retries=retries, + ) + + self.enforce_content_length = enforce_content_length + self.auto_close = auto_close + + self._body = None + self._fp: _HttplibHTTPResponse | None = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + + if body and isinstance(body, (str, bytes)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, "read"): + self._fp = body # type: ignore[assignment] + self._sock_shutdown = sock_shutdown + + # Are we using the chunked-style of transfer encoding? + self.chunk_left: int | None = None + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # Used to return the correct amount of bytes for partial read()s + self._decoded_buffer = BytesQueueBuffer() + + # If requested, preload the body. 
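+        # Note: preloading reads the entire body right here, so there is
+        # nothing left for stream()/read() to pull later; pass
+        # preload_content=False to consume the body lazily instead.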
+        if preload_content and not self._body:
+            self._body = self.read(decode_content=decode_content)
+
+    def release_conn(self) -> None:
+        if not self._pool or not self._connection:
+            return None
+
+        self._pool._put_conn(self._connection)
+        self._connection = None
+
+    def drain_conn(self) -> None:
+        """
+        Read and discard any remaining HTTP response data in the response connection.
+
+        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+        """
+        try:
+            self.read()
+        except (HTTPError, OSError, BaseSSLError, HTTPException):
+            pass
+
+    @property
+    def data(self) -> bytes:
+        # For backwards-compat with urllib3 0.4 and earlier.
+        if self._body:
+            return self._body  # type: ignore[return-value]
+
+        if self._fp:
+            return self.read(cache_content=True)
+
+        return None  # type: ignore[return-value]
+
+    @property
+    def connection(self) -> HTTPConnection | None:
+        return self._connection
+
+    def isclosed(self) -> bool:
+        return is_fp_closed(self._fp)
+
+    def tell(self) -> int:
+        """
+        Obtain the number of bytes pulled over the wire so far. May differ from
+        the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
+        if bytes are encoded on the wire (e.g., compressed).
+        """
+        return self._fp_bytes_read
+
+    def _init_length(self, request_method: str | None) -> int | None:
+        """
+        Set initial length value for Response content if available.
+        """
+        length: int | None
+        content_length: str | None = self.headers.get("content-length")
+
+        if content_length is not None:
+            if self.chunked:
+                # This Response will fail with an IncompleteRead if it can't be
+                # received as chunked. This method falls back to attempt reading
+                # the response before raising an exception.
+                log.warning(
+                    "Received response with both Content-Length and "
+                    "Transfer-Encoding set. This is expressly forbidden "
+                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
+                    "attempting to process response as Transfer-Encoding: "
+                    "chunked."
+                )
+                return None
+
+            try:
+                # RFC 7230 section 3.3.2 specifies multiple content lengths can
+                # be sent in a single Content-Length header
+                # (e.g. Content-Length: 42, 42). This line ensures the values
+                # are all valid ints and that as long as the `set` length is 1,
+                # all values are the same. Otherwise, the header is invalid.
+                lengths = {int(val) for val in content_length.split(",")}
+                if len(lengths) > 1:
+                    raise InvalidHeader(
+                        "Content-Length contained multiple "
+                        "unmatching values (%s)" % content_length
+                    )
+                length = lengths.pop()
+            except ValueError:
+                length = None
+            else:
+                if length < 0:
+                    length = None
+
+        else:  # if content_length is None
+            length = None
+
+        # Convert status to int for comparison
+        # In some cases, httplib returns a status of "_UNKNOWN"
+        try:
+            status = int(self.status)
+        except ValueError:
+            status = 0
+
+        # Check for responses that shouldn't include a body
+        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
+            length = 0
+
+        return length
+
+    @contextmanager
+    def _error_catcher(self) -> typing.Generator[None]:
+        """
+        Catch low-level python exceptions, instead re-raising urllib3
+        variants, so that low-level exceptions are not leaked in the
+        high-level api.
+
+        On exit, release the connection back to the pool.
+        """
+        clean_exit = False
+
+        try:
+            try:
+                yield
+
+            except SocketTimeout as e:
+                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+                # there is yet no clean way to get at it from this context.
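+                # The socket-level timeout is re-raised as urllib3's own
+                # ReadTimeoutError so callers only ever handle urllib3 exceptions.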
+ raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type] + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if "read operation timed out" not in str(e): + # SSL errors related to framing/MAC get wrapped and reraised here + raise SSLError(e) from e + + raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type] + + except IncompleteRead as e: + if ( + e.expected is not None + and e.partial is not None + and e.expected == -e.partial + ): + arg = "Response may not contain content." + else: + arg = f"Connection broken: {e!r}" + raise ProtocolError(arg, e) from e + + except (HTTPException, OSError) as e: + raise ProtocolError(f"Connection broken: {e!r}", e) from e + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def _fp_read( + self, + amt: int | None = None, + *, + read1: bool = False, + ) -> bytes: + """ + Read a response with the thought that reading the number of bytes + larger than can fit in a 32-bit int at a time via SSL in some + known cases leads to an overflow error that has to be prevented + if `amt` or `self.length_remaining` indicate that a problem may + happen. + + The known cases: + * CPython < 3.9.7 because of a bug + https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900. + * urllib3 injected with pyOpenSSL-backed SSL-support. + * CPython < 3.10 only when `amt` does not fit 32-bit int. + """ + assert self._fp + c_int_max = 2**31 - 1 + if ( + (amt and amt > c_int_max) + or ( + amt is None + and self.length_remaining + and self.length_remaining > c_int_max + ) + ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)): + if read1: + return self._fp.read1(c_int_max) + buffer = io.BytesIO() + # Besides `max_chunk_amt` being a maximum chunk size, it + # affects memory overhead of reading a response by this + # method in CPython. + # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum + # chunk size that does not lead to an overflow error, but + # 256 MiB is a compromise. + max_chunk_amt = 2**28 + while amt is None or amt != 0: + if amt is not None: + chunk_amt = min(amt, max_chunk_amt) + amt -= chunk_amt + else: + chunk_amt = max_chunk_amt + data = self._fp.read(chunk_amt) + if not data: + break + buffer.write(data) + del data # to reduce peak memory usage by `max_chunk_amt`. + return buffer.getvalue() + elif read1: + return self._fp.read1(amt) if amt is not None else self._fp.read1() + else: + # StringIO doesn't like amt=None + return self._fp.read(amt) if amt is not None else self._fp.read() + + def _raw_read( + self, + amt: int | None = None, + *, + read1: bool = False, + ) -> bytes: + """ + Reads `amt` of bytes from the socket. 
+ """ + if self._fp is None: + return None # type: ignore[return-value] + + fp_closed = getattr(self._fp, "closed", False) + + with self._error_catcher(): + data = self._fp_read(amt, read1=read1) if not fp_closed else b"" + if amt is not None and amt != 0 and not data: + # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + if ( + self.enforce_content_length + and self.length_remaining is not None + and self.length_remaining != 0 + ): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. + raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + elif read1 and ( + (amt != 0 and not data) or self.length_remaining == len(data) + ): + # All data has been read, but `self._fp.read1` in + # CPython 3.12 and older doesn't always close + # `http.client.HTTPResponse`, so we close it here. + # See https://github.com/python/cpython/issues/113199 + self._fp.close() + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + return data + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, + cache_content: bool = False, + ) -> bytes: + """ + Similar to :meth:`http.client.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if amt and amt < 0: + # Negative numbers and `None` should be treated the same. + amt = None + elif amt is not None: + cache_content = False + + if len(self._decoded_buffer) >= amt: + return self._decoded_buffer.get(amt) + + data = self._raw_read(amt) + + flush_decoder = amt is None or (amt != 0 and not data) + + if not data and len(self._decoded_buffer) == 0: + return data + + if amt is None: + data = self._decode(data, decode_content, flush_decoder) + if cache_content: + self._body = data + else: + # do not waste memory on buffer when not decoding + if not decode_content: + if self._has_decoded_content: + raise RuntimeError( + "Calling read(decode_content=False) is not supported after " + "read(decode_content=True) was called." 
+                    )
+                return data
+
+            decoded_data = self._decode(data, decode_content, flush_decoder)
+            self._decoded_buffer.put(decoded_data)
+
+            while len(self._decoded_buffer) < amt and data:
+                # TODO make sure to initially read enough data to get past the headers
+                # For example, the GZ file header takes 10 bytes, we don't want to read
+                # it one byte at a time
+                data = self._raw_read(amt)
+                decoded_data = self._decode(data, decode_content, flush_decoder)
+                self._decoded_buffer.put(decoded_data)
+            data = self._decoded_buffer.get(amt)
+
+        return data
+
+    def read1(
+        self,
+        amt: int | None = None,
+        decode_content: bool | None = None,
+    ) -> bytes:
+        """
+        Similar to ``http.client.HTTPResponse.read1`` and documented
+        in :meth:`io.BufferedReader.read1`, but with an additional parameter:
+        ``decode_content``.
+
+        :param amt:
+            How much of the content to read.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        if decode_content is None:
+            decode_content = self.decode_content
+        if amt and amt < 0:
+            # Negative numbers and `None` should be treated the same.
+            amt = None
+        # try and respond without going to the network
+        if self._has_decoded_content:
+            if not decode_content:
+                raise RuntimeError(
+                    "Calling read1(decode_content=False) is not supported after "
+                    "read1(decode_content=True) was called."
+                )
+            if len(self._decoded_buffer) > 0:
+                if amt is None:
+                    return self._decoded_buffer.get_all()
+                return self._decoded_buffer.get(amt)
+            if amt == 0:
+                return b""
+
+        # FIXME, this method's type doesn't say returning None is possible
+        data = self._raw_read(amt, read1=True)
+        if not decode_content or data is None:
+            return data
+
+        self._init_decoder()
+        while True:
+            flush_decoder = not data
+            decoded_data = self._decode(data, decode_content, flush_decoder)
+            self._decoded_buffer.put(decoded_data)
+            if decoded_data or flush_decoder:
+                break
+            data = self._raw_read(8192, read1=True)
+
+        if amt is None:
+            return self._decoded_buffer.get_all()
+        return self._decoded_buffer.get(amt)
+
+    def stream(
+        self, amt: int | None = 2**16, decode_content: bool | None = None
+    ) -> typing.Generator[bytes]:
+        """
+        A generator wrapper for the read() method. A call will block until
+        ``amt`` bytes have been read from the connection or until the
+        connection is closed.
+
+        :param amt:
+            How much of the content to read. The generator will return up to
+            this much data per iteration, but may return less. This is
+            particularly likely when using compressed data. However, the empty
+            string will never be returned.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+ """ + if self.chunked and self.supports_chunked_reads(): + yield from self.read_chunked(amt, decode_content=decode_content) + else: + while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0: + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + # Overrides from io.IOBase + def readable(self) -> bool: + return True + + def shutdown(self) -> None: + if not self._sock_shutdown: + raise ValueError("Cannot shutdown socket as self._sock_shutdown is not set") + if self._connection is None: + raise RuntimeError( + "Cannot shutdown as connection has already been released to the pool" + ) + self._sock_shutdown(socket.SHUT_RD) + + def close(self) -> None: + self._sock_shutdown = None + + if not self.closed and self._fp: + self._fp.close() + + if self._connection: + self._connection.close() + + if not self.auto_close: + io.IOBase.close(self) + + @property + def closed(self) -> bool: + if not self.auto_close: + return io.IOBase.closed.__get__(self) # type: ignore[no-any-return] + elif self._fp is None: + return True + elif hasattr(self._fp, "isclosed"): + return self._fp.isclosed() + elif hasattr(self._fp, "closed"): + return self._fp.closed + else: + return True + + def fileno(self) -> int: + if self._fp is None: + raise OSError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise OSError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) + + def flush(self) -> None: + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): + return self._fp.flush() + + def supports_chunked_reads(self) -> bool: + """ + Checks if the underlying file-like object looks like a + :class:`http.client.HTTPResponse` object. We do this by testing for + the fp attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). + """ + return hasattr(self._fp, "fp") + + def _update_chunk_length(self) -> None: + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return None + line = self._fp.fp.readline() # type: ignore[union-attr] + line = line.split(b";", 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + self.close() + if line: + # Invalid chunked protocol response, abort. + raise InvalidChunkLength(self, line) from None + else: + # Truncated at start of next chunk + raise ProtocolError("Response ended prematurely") from None + + def _handle_chunk(self, amt: int | None) -> bytes: + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr] + returned_chunk = chunk + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif self.chunk_left is not None and amt < self.chunk_left: + value = self._fp._safe_read(amt) # type: ignore[union-attr] + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) # type: ignore[union-attr] + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr] + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. 
+            self.chunk_left = None
+        return returned_chunk  # type: ignore[no-any-return]
+
+    def read_chunked(
+        self, amt: int | None = None, decode_content: bool | None = None
+    ) -> typing.Generator[bytes]:
+        """
+        Similar to :meth:`HTTPResponse.read`, but with an additional
+        parameter: ``decode_content``.
+
+        :param amt:
+            How much of the content to read. If specified, caching is skipped
+            because it doesn't make sense to cache partial content as the full
+            response.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        self._init_decoder()
+        # FIXME: Rewrite this method and make it a class with better structured logic.
+        if not self.chunked:
+            raise ResponseNotChunked(
+                "Response is not chunked. "
+                "Header 'transfer-encoding: chunked' is missing."
+            )
+        if not self.supports_chunked_reads():
+            raise BodyNotHttplibCompatible(
+                "Body should be http.client.HTTPResponse like. "
+                "It should have an fp attribute which returns raw chunks."
+            )
+
+        with self._error_catcher():
+            # Don't bother reading the body of a HEAD request.
+            if self._original_response and is_response_to_head(self._original_response):
+                self._original_response.close()
+                return None
+
+            # If a response is already read and closed
+            # then return immediately.
+            if self._fp.fp is None:  # type: ignore[union-attr]
+                return None
+
+            if amt and amt < 0:
+                # Negative numbers and `None` should be treated the same,
+                # but httplib handles only `None` correctly.
+                amt = None
+
+            while True:
+                self._update_chunk_length()
+                if self.chunk_left == 0:
+                    break
+                chunk = self._handle_chunk(amt)
+                decoded = self._decode(
+                    chunk, decode_content=decode_content, flush_decoder=False
+                )
+                if decoded:
+                    yield decoded
+
+            if decode_content:
+                # On CPython and PyPy, we should never need to flush the
+                # decoder. However, on Jython we *might* need to, so
+                # let's defensively do it anyway.
+                decoded = self._flush_decoder()
+                if decoded:  # Platform-specific: Jython.
+                    yield decoded
+
+            # Chunk content ends with \r\n: discard it.
+            while self._fp is not None:
+                line = self._fp.fp.readline()
+                if not line:
+                    # Some sites may not end with '\r\n'.
+                    break
+                if line == b"\r\n":
+                    break
+
+            # We read everything; close the "file".
+            if self._original_response:
+                self._original_response.close()
+
+    @property
+    def url(self) -> str | None:
+        """
+        Returns the URL that was the source of this response.
+        If the request that generated this response redirected, this method
+        will return the final redirect location.
+        """
+        return self._request_url
+
+    @url.setter
+    def url(self, url: str) -> None:
+        self._request_url = url
+
+    def __iter__(self) -> typing.Iterator[bytes]:
+        buffer: list[bytes] = []
+        for chunk in self.stream(decode_content=True):
+            if b"\n" in chunk:
+                chunks = chunk.split(b"\n")
+                yield b"".join(buffer) + chunks[0] + b"\n"
+                for x in chunks[1:-1]:
+                    yield x + b"\n"
+                if chunks[-1]:
+                    buffer = [chunks[-1]]
+                else:
+                    buffer = []
+            else:
+                buffer.append(chunk)
+        if buffer:
+            yield b"".join(buffer)
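
Editor's note: the ``preload_content`` flag, ``stream()``, and ``json()`` defined
above cover the common ways of consuming an ``HTTPResponse``. A minimal sketch of
both patterns using only the public urllib3 API from this file; the URL and the
output path are illustrative, not part of the source::

    import urllib3

    http = urllib3.PoolManager()

    # Streaming: skip preloading so read()/stream() pull from the socket lazily.
    resp = http.request("GET", "https://example.com/big.bin", preload_content=False)
    with open("big.bin", "wb") as fh:
        for chunk in resp.stream(2**16):  # decoded chunks; never yields b""
            fh.write(chunk)
    resp.release_conn()  # hand the connection back to the pool

    # Preloaded (the default): the body is read eagerly during construction,
    # and json() decodes it as UTF-8 per RFC 8259.
    doc = http.request("GET", "https://example.com/data.json").json()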